#!/usr/bin/env python3
##
## -----------------------------------------------------------------
## This file is part of WAPT Software Deployment
## Copyright (C) 2012 - 2023 Tranquil IT https://www.tranquil.it
## All Rights Reserved.
##
## WAPT helps systems administrators to efficiently deploy,
## set up, update and configure applications.
## ------------------------------------------------------------------
##
import glob
import locale
import logging
import os
import platform
import shutil
import socket
import stat
import subprocess
import sys
import time
import psutil
import netifaces
import getpass
from iniparse import RawConfigParser
from waptutils import (Version, __version__, all_files, dateof,
datetime2isodate, ensure_list, ensure_unicode,
fileisodate, find_all_files, get_disk_free_space,
hours_minutes, httpdatetime2isodate, isodate2datetime,
time2display, wget, wgets, makepath, killtree, isfile, isdir,
CalledProcessErrorOutput, remove_file, mkdirs, get_main_ip, get_local_IPs,
killalltasks, isrunning, get_sha256, CustomZipFile,
                       run, run_notfatal, CalledProcessError, RunOutput, RunReader, listening_sockets,
                       copytree2, default_skip, default_overwrite, default_oncopy, default_overwrite_older)
from urllib.request import getproxies
import requests
import uuid
import hashlib
import bs4 as BeautifulSoup
from setuphelpers import *


def get_proxies_from_wapt_console():
r"""Return proxy information from the current user WAPT console
>>> get_proxies_from_wapt_console()
{'http': 'http://srvproxy.ad.domain.lan:8080',
'https': 'http://srvproxy.ad.domain.lan:8080'}
"""
proxies = {}
if platform.system() == 'Windows':
waptconsole_ini_path = makepath(user_local_appdata(), 'waptconsole', 'waptconsole.ini')
else:
waptconsole_ini_path = makepath(user_home_directory(), '.config', 'waptconsole', 'waptconsole.ini')
if isfile(waptconsole_ini_path):
proxy_wapt = inifile_readstring(waptconsole_ini_path, 'global', 'http_proxy')
if proxy_wapt:
proxies = {'http': proxy_wapt, 'https': proxy_wapt}
return proxies
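

# Illustrative usage sketch (not part of the original module): the proxies dict
# returned above can be passed straight to requests. The URL is a placeholder.
def _example_get_with_console_proxies():
    """Fetch a page through the proxies configured in the local WAPT console."""
    proxies = get_proxies_from_wapt_console()
    return requests.get('https://www.example.com/', proxies=proxies, timeout=10)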


def bs_find(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
r"""Parse html web page with BeautifulSoup and get the first result
Args:
url (str): url of the web page to parse
element (str): searched element
attribute (str): selected attribute of the element
value (str): value of the selected attribute
user_agent (str): specify a user-agent if needed
proxies (dict): specify your proxy if needed
**kwargs (str): joker for requests parameters
features (str): bs feature to use
>>> bs_find('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')['href']
'https://web-platform-tests.org/'
>>> bs_find('https://www.w3.org/', 'span', 'class', 'alt-logo').string
'W3C'
.. versionadded:: 2.0
"""
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
soup = BeautifulSoup.BeautifulSoup(page, features=features)
if value:
return soup.find(element, {attribute: value})
else:
return soup.find(element)
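

# Illustrative sketch (not part of the original module): grab the first link
# matching a title attribute; the URL and title come from the doctest above.
def _example_first_link():
    """Return the href of the first matching <a> element, or None if absent."""
    link = bs_find('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')
    return link['href'] if link else None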


def bs_find_all(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
r"""Parse html web page with BeautifulSoup and get a list of the result
Args:
url (str): url of the web page to parse
element (str): searched element
attribute (str): selected attribute of the element
value (str): value of the selected attribute
user_agent (str): specify a user-agent if needed
proxies (dict): specify your proxy if needed
**kwargs (str): joker for requests parameters
features (str): bs feature to use
>>> bs_find_all('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')[0]['href']
'https://web-platform-tests.org/'
>>> bs_find_all('https://www.w3.org/', 'span', 'class', 'alt-logo')[0].string
'W3C'
.. versionadded:: 2.0
"""
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find_all(element, {attribute: value})
else:
return soup.find_all(element)
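

# Illustrative sketch (not part of the original module): a common pattern in
# setup scripts is to scrape a vendor page for installer links. The URL and
# the 'download' class value below are placeholders, not a real download page.
def _example_installer_links(url='https://www.example.com/downloads'):
    """Return the href of every <a> element whose class is 'download'."""
    return [link['href'] for link in bs_find_all(url, 'a', 'class', 'download') if link.get('href')]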


def remove_outdated_binaries(version, list_extensions=["exe", "msi", "deb", "rpm", "dmg", "pkg"], filename_contains=None):
r"""Remove files based on the version contained in his filename, failing over on file version on compatible OSes
Args:
version (str): version number of keeped files
list_extensions (str or list of str): file extensions of compared files
filename_contains (str or list of str): Part of the filename that must be contained (useful for distinguishing architecture and os)
Returns:
list: list of deleted files
.. versionadded:: 2.0
.. versionchanged:: 2.2
Now returns removed files, now checking .exe and .msi file versions
"""
    files = []
    if not isinstance(list_extensions, list):
        list_extensions = [list_extensions]
    if filename_contains and not isinstance(filename_contains, list):
        filename_contains = [filename_contains]
    # Normalize extensions so each starts with a dot, without dropping those that already do
    list_extensions = [ext if ext.startswith(".") else "." + ext for ext in list_extensions]
    for file_ext in list_extensions:
        for bin_in_dir in glob.glob("*%s" % file_ext):
            # Remove files missing any required filename part, then skip to the next file
            if filename_contains and any(pattern not in bin_in_dir for pattern in filename_contains):
                remove_file(bin_in_dir)
                files.append(bin_in_dir)
                continue
            if version not in bin_in_dir:
                if platform.system() == "Windows" and file_ext in (".exe", ".msi"):
                    # The filename does not carry the version: fall back on the version embedded in the binary
                    if Version(version, 4) == Version(get_version_from_binary(bin_in_dir, "FileVersion"), 4) or Version(version, 4) == Version(get_version_from_binary(bin_in_dir, "ProductVersion"), 4):
                        print("%s file or product version is correct (%s)" % (bin_in_dir, version))
                        continue
                remove_file(bin_in_dir)
                files.append(bin_in_dir)
    if files:
        print("\n".join(["Removed outdated binary: " + fn for fn in files]))
    return files
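

# Illustrative sketch (not part of the original module): in a package's
# update_package() step you would typically download the new installer, then
# purge leftovers from previous versions. Version and filter are placeholders.
def _example_purge_old_installers():
    """Remove every .exe in the current directory not matching version 1.2.3 / x64."""
    return remove_outdated_binaries('1.2.3', list_extensions='exe', filename_contains='x64')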


def add_double_quotes_around(string):
r"""Return the string with double quotes around
Args:
string (str): a string
"""
return '"'+string+'"'


def unzip_with_7zip(filename, target=None, filenames=[], extract_with_full_paths=True, recursive=True):
r"""Extract the files from an archive file with 7zip with patterns in filenames to target directory
Args:
filename (str): path to archive file. (can be relative to temporary unzip location of package)
target (str): archive content to target dir location (dir will be created if needed). Default: dirname(archive file) + basename(archive file)
filenames (str or list of str): list of filenames / glob patterns (path sep is normally a slash)
extract_with_full_paths (bool): keeping or not the subfolders of the archive as is
recursive (bool): looking or not for filenames recursively
Returns:
None
.. versionadded:: 2.0
.. versionchanged:: 2.2
changed default target location to make it correspond with unzip()
"""
if not target:
target = makepath(os.path.dirname(os.path.abspath(filename)), os.path.splitext(os.path.basename(filename))[0])
if not isinstance(filenames, list):
filenames = [filenames]
    # Look for 7z.exe in the 64-bit Program Files first, then the 32-bit one
    sevenzip_path = makepath(programfiles, '7-Zip', '7z.exe')
    if not isfile(sevenzip_path):
        sevenzip_path = makepath(programfiles32, '7-Zip', '7z.exe')
    if not isfile(sevenzip_path):
        error("7-Zip must be installed on this computer to use this function")
    # 7z 'x' keeps the archive's directory structure, 'e' flattens it
    extract_mode = 'x' if extract_with_full_paths else 'e'
    recurse_flag = '-r' if recursive else ''
    if not filenames:
        run(r'"%s" %s "%s" -o"%s" %s -aoa' % (sevenzip_path, extract_mode, filename, target, recurse_flag))
    else:
        for extract in filenames:
            run(r'"%s" %s "%s" -o"%s" "%s" %s -aoa' % (sevenzip_path, extract_mode, filename, target, extract, recurse_flag))