hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
29f348ce2221e92c79d25e0d2151332aec4f637c | 1,100 | py | Python | memoro/wsgi.py | bbengfort/memorandi | 4591d26c097513d67e11916583ed043e78e87816 | [
"MIT"
] | null | null | null | memoro/wsgi.py | bbengfort/memorandi | 4591d26c097513d67e11916583ed043e78e87816 | [
"MIT"
] | 18 | 2020-12-02T16:37:21.000Z | 2021-09-22T19:40:37.000Z | memoro/wsgi.py | bbengfort/memorandi | 4591d26c097513d67e11916583ed043e78e87816 | [
"MIT"
] | null | null | null |
# memoro.wsgi
# WSGI config for memoro project.
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Sat Nov 28 13:44:01 2020 -0500
#
# Copyright (C) 2020 Bengfort.com
# For license information, see LICENSE
#
# ID: wsgi.py [] [email protected] $
"""
WSGI config for memoro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
##########################################################################
## Imports
##########################################################################
import os
from django.core.wsgi import get_wsgi_application
from dotenv import find_dotenv, load_dotenv
##########################################################################
## Load environment and create WSGI application
##########################################################################
# Load variables from the nearest .env file before Django settings are read.
load_dotenv(find_dotenv())
# Fall back to the development settings module unless one is already set in the environment.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'memoro.settings.development')
application = get_wsgi_application()
| 28.205128 | 78 | 0.555455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 881 | 0.800909 |
29f40d1447bb8284816ad9a4024cc926058b38fe | 29,679 | py | Python | serialized_data_converter.py | facelessuser/SerializedDataConverter | 6cd0d59ae6cda98208e60e9c729d0eb047fe93db | [
"MIT"
] | 19 | 2015-02-13T08:16:32.000Z | 2021-07-31T02:55:39.000Z | serialized_data_converter.py | facelessuser/SerializedDataConverter | 6cd0d59ae6cda98208e60e9c729d0eb047fe93db | [
"MIT"
] | 9 | 2015-03-10T15:34:08.000Z | 2019-11-18T01:57:20.000Z | serialized_data_converter.py | facelessuser/SerializedDataConverter | 6cd0d59ae6cda98208e60e9c729d0eb047fe93db | [
"MIT"
] | 4 | 2015-10-01T16:04:52.000Z | 2019-10-27T00:53:36.000Z | """
Serialized Data Converter.
Licensed under MIT
Copyright (c) 2012 - 2015 Isaac Muse <[email protected]>
"""
import sublime
import sublime_plugin
import codecs
import re
import traceback
import os
from SerializedDataConverter.lib.log import error_msg
from SerializedDataConverter.lib import plist_includes as plist
from SerializedDataConverter.lib import yaml_includes as yaml
from SerializedDataConverter.lib import json_includes as json
PACKAGE_SETTINGS = "serialized_data_converter.sublime-settings"
def to_hex(value):
    """Return *value* rendered as a two-digit, lowercase hexadecimal string."""
    return "{:02x}".format(value)
class SerializedDataConverterListener(sublime_plugin.EventListener):
    """Listener that converts registered file types automatically after save."""

    def on_post_save(self, view):
        """Convert the just-saved file if its extension appears in the settings table."""
        ext2convert = self.get_save_ext()
        filename = view.file_name()
        command = None
        if filename is not None:
            # First converter whose extension matches the saved file wins.
            for converter in ext2convert:
                ext = converter.get("ext", None)
                if ext is not None and filename.lower().endswith(ext.lower()):
                    command = converter.get("command", None)
                    break
        if command is not None:
            self.convert(view, command)

    def get_save_ext(self):
        """Return the ``convert_on_save`` extension table from the package settings."""
        return sublime.load_settings(PACKAGE_SETTINGS).get("convert_on_save", [])

    def convert(self, view, command):
        """Run the conversion command named by the settings entry.

        A command *starting* with ``bplist`` means the source is a binary
        PLIST; one *ending* with ``bplist`` means the output should be saved
        as a binary PLIST.  Either way the underlying command name uses the
        plain ``plist`` spelling with the binary flags set accordingly.
        """
        binary = False
        save_binary = False
        if command.startswith('bplist'):
            command = command.replace('bplist', 'plist')
            binary = True
        elif command.endswith('bplist'):
            command = command.replace('bplist', 'plist')
            save_binary = True
        view.run_command(
            "serialized_%s" % command, {
                # Fix: was the string 'True'; use a real boolean like the
                # sibling arguments (the receiver only truth-tests it, but
                # the mixed types were inconsistent and error-prone).
                "save_to_file": True,
                "show_file": False,
                "force": True,
                "binary": binary,
                'save_binary': save_binary
            }
        )
class _LanguageConverter(sublime_plugin.TextCommand):
    """Language converter base class.

    Subclasses override ``get_output_file``, ``read_source`` and ``convert``;
    this base class decides where the converted output goes (file on disk or
    view buffer) and applies the output syntax highlighting.
    """
    # Settings key naming the syntax file for the output language; set by subclasses.
    lang = None
    default_lang = "Packages/Text/Plain text.tmLanguage"
    # User-facing error messages keyed by the conversion step that failed.
    errors = {
        "filewrite": "Could not write file!\n"
                     "Please see console for more info.",
        "bufferwrite": "Could not write view buffer!\n"
                       "Please see console for more info.",
        "view2yaml": "Could not read view buffer as YAML!\n"
                     "Please see console for more info.",
        "view2json": "Could not read view buffer as JSON!\n"
                     "Please see console for more info.",
        "view2plist": "Could not read view buffer as PLIST!\n"
                      "Please see console for more info.",
        "view2bplist": "Could not read view buffer as Binary PLIST!\n"
                       "Please see console for more info.",
        "yaml2json": "Could not convert YAML to JSON!\n"
                     "Please see console for more info.",
        "json2yaml": "Could not convert JSON to YAML!\n"
                     "Please see console for more info.",
        "plist2yaml": "Could not convert PLIST to YAML!\n"
                      "Please see console for more info.",
        "bplist2yaml": "Could not convert Binary PLIST to YAML!\n"
                       "Please see console for more info.",
        "yaml2plist": "Could not convert YAML to PLIST!\n"
                      "Please see console for more info.",
        "yaml2bplist": "Could not convert YAML to Binary PLIST!\n"
                       "Please see console for more info.",
        "json2plist": "Could not convert JSON to PLIST!\n"
                      "Please see console for more info.",
        "json2bplist": "Could not convert JSON to Binary PLIST!\n"
                       "Please see console for more info.",
        "plist2json": "Could not convert PLIST to JSON!\n"
                      "Please see console for more info.",
        "bplist2json": "Could not convert Binary PLIST to JSON!\n"
                       "Please see console for more info.",
        "bplist2plist": "Could not convert Binary PLIST to PLIST!\n"
                        "Please see console for more info.",
        "plist2bplist": "Could not convert PLIST to Binary PLIST!\n"
                        "Please see console for more info.",
        "binwrite": "Source view does not exist on disk, so save name and location cannot be determined.\n"
                    "You can convert and save to disk as an XML PLIST and then convert it to BPLIST."
    }
    def __init__(self, *args, **kwargs):
        """General setup: cache the package settings object."""
        self.settings = sublime.load_settings(PACKAGE_SETTINGS)
        super().__init__(*args, **kwargs)
    def set_syntax(self):
        """Apply the chosen output syntax to the output view, if one exists."""
        if self.output_view is not None:
            # Get syntax language and set it
            self.output_view.set_syntax_file(self.syntax)
    def write_file(self, edit, show_file):
        """Write data to a file if a location can be acquired else save to a view buffer."""
        errors = False
        if self.save_filename is not None and os.path.exists(os.path.dirname(self.save_filename)):
            # Save content to UTF file
            try:
                # Binary output goes through a raw byte stream; text output
                # is written as UTF-8.
                if self.save_binary:
                    with open(self.save_filename, "wb") as f:
                        f.write(self.output)
                else:
                    with codecs.open(self.save_filename, "w", "utf-8") as f:
                        f.write(self.output)
                # Release the (possibly large) converted payload once written.
                self.output = None
                if show_file:
                    self.output_view = self.view.window().open_file(self.save_filename)
            except Exception:
                errors = True
                error_msg(self.errors["filewrite"], traceback.format_exc())
            if not errors and show_file:
                self.set_syntax()
        else:
            # Could not acquire a name that exists on disk
            # Fallback to buffer write
            self.write_buffer(edit, force_new_buffer=True)
    def write_buffer(self, edit, force_new_buffer=False):
        """Write the data to a view buffer."""
        errors = False
        new_buffer = bool(self.settings.get("open_in_new_buffer", False))
        # Save content to view buffer
        try:
            self.output_view = self.view.window().new_file() if new_buffer or force_new_buffer else self.view
            if self.save_binary:
                # Render binary output as a hex dump: two hex digits per byte,
                # a space between every 2-byte pair, a newline every 16 bytes.
                self.output_view.set_encoding('Hexadecimal')
                bin_output = []
                count = 0
                for b in self.output:
                    if count % 16 == 0 and count != 0:
                        bin_output += ['\n', to_hex(b)]
                    else:
                        if count % 2 == 0 and count != 0:
                            bin_output += [' ', to_hex(b)]
                        else:
                            bin_output.append(to_hex(b))
                    count += 1
                self.output = None
                self.output_view.replace(
                    edit,
                    sublime.Region(0, self.view.size()),
                    ''.join(bin_output)
                )
                bin_output = None
            else:
                self.output_view.set_encoding('UTF-8')
                self.output_view.replace(
                    edit,
                    sublime.Region(0, self.view.size()),
                    self.output
                )
                self.output = None
        except Exception:
            errors = True
            error_msg(self.errors["bufferwrite"], traceback.format_exc())
        if not errors:
            if new_buffer or force_new_buffer:
                # If a name can be acquired from the original view,
                # give buffer a modified derivative of the name.
                if self.save_filename is not None:
                    self.output_view.set_name(os.path.basename(self.save_filename))
            self.set_syntax()
    def is_enabled(self, **kwargs):
        """Determine if the command should be enabled."""
        enabled = True
        filename = self.view.file_name()
        view_okay = True
        # A binary source needs either a file on disk or a hex-encoded view.
        if (
            kwargs.get('binary', False) and
            (filename is None or not os.path.exists(filename)) and
            self.view.encoding() != 'Hexadecimal'
        ):
            view_okay = False
        # Honor the user's enable/disable settings unless the call is forced
        # (e.g. from the on-save listener).
        if not kwargs.get('force', False):
            if (
                kwargs.get('save_to_file', False) and
                not bool(self.settings.get("enable_save_to_file_commands", False))
            ):
                enabled = False
            elif (
                not kwargs.get('save_to_file', False) and
                not bool(self.settings.get("enable_show_in_buffer_commands", False))
            ):
                enabled = False
        if not view_okay and enabled:
            enabled = False
        return enabled
    def get_output_file(self, filename):
        """Get output filename to save to. Overridden by subclasses."""
        return None
    def read_source(self):
        """Read the source. Overridden by subclasses; returns True on error."""
        return False
    def convert(self, edit):
        """Convert the read data to the desired format. Returns True on error."""
        return False
    def run(self, edit, **kwargs):
        """Begin conversion."""
        self.binary = kwargs.get('binary', False)
        self.save_binary = kwargs.get('save_binary', False)
        self.syntax = self.settings.get(self.lang, self.default_lang) if self.lang is not None else self.default_lang
        filename = self.view.file_name()
        self.save_filename = self.get_output_file(filename) if filename is not None else None
        # read_source()/convert() return True on error, so proceed only on False.
        if not self.read_source():
            if not self.convert(edit):
                if kwargs.get('save_to_file', False):
                    self.write_file(edit, kwargs.get('show_file', True))
                else:
                    self.write_buffer(edit)
##########################
# Plist <-> YAML
##########################
class SerializedPlistToYamlCommand(_LanguageConverter):
    """Convert PLIST to YAML."""

    lang = "yaml_language"
    default_lang = "Packages/YAML/YAML.tmLanguage"

    def get_output_file(self, filename):
        """Map the source filename to a YAML output filename.

        The extension table from settings is consulted first; if no entry
        matches, the current extension is replaced with the default.
        """
        name = None
        if self.binary:
            setting = 'bplist_yaml_conversion_ext'
            src = 'bplist'
        else:
            setting = 'plist_yaml_conversion_ext'
            src = 'plist'
        # Try and find file ext in the ext table
        for ext in self.settings.get(setting, []):
            m = re.match("^(.*)\\." + re.escape(ext[src]) + "$", filename, re.IGNORECASE)
            if m is not None:
                name = m.group(1) + "." + ext["yaml"]
                break
        # Could not find ext in table, replace current extension with default
        if name is None:
            name = os.path.splitext(filename)[0] + ".YAML"
        return name

    def read_source(self):
        """Read the source PLIST; return True when an error occurred."""
        errors = False
        ext_tbl = self.settings.get("yaml_strip_tabs_from", [])
        filename = self.view.file_name()
        # Tab stripping in the YAML dump is enabled per source extension.
        self.strip_tabs = False
        if filename is not None:
            for ext in ext_tbl:
                m = re.match("^(.*)\\." + re.escape(ext) + "$", filename, re.IGNORECASE)
                if m is not None:
                    self.strip_tabs = True
                    break
        try:
            # Pick the reader that matches where the source actually lives:
            # a hex-encoded view, a binary file on disk, or the view buffer.
            if self.binary and self.view.encoding() == 'Hexadecimal':
                self.plist = plist.read_plist_from_hex_view(self.view)
            elif self.binary and filename is not None and os.path.exists(filename):
                self.plist = plist.read_plist_from_file(filename)
            else:
                self.plist = plist.read_plist_from_view(self.view)
        except Exception:
            errors = True
            error_type = 'view2bplist' if self.binary else 'view2plist'
            error_msg(self.errors[error_type], traceback.format_exc())
        return errors

    def convert(self, edit):
        """Dump the parsed PLIST data to YAML; return True when an error occurred.

        The previous dead ``if not errors:`` guard (always true at that
        point) has been removed.
        """
        errors = False
        try:
            # Settings store flow style as the strings "true"/"false";
            # anything else leaves the dumper's default in place.
            default_flow_style = None
            flow_setting = self.settings.get("yaml_default_flow_style", None)
            if flow_setting == "true":
                default_flow_style = True
            elif flow_setting == "false":
                default_flow_style = False
            # Convert Python dict to YAML buffer.
            self.output = yaml.yaml_dumps(
                self.plist,
                default_flow_style=default_flow_style,
                indent=self.settings.get("yaml_indent", 4),
                strip_tabs=self.strip_tabs,
                detect_timestamp=self.settings.get("yaml_detect_timestamp", True)
            )
            self.plist = None
        except Exception:
            errors = True
            error_type = 'bplist2yaml' if self.binary else 'plist2yaml'
            error_msg(self.errors[error_type], traceback.format_exc())
        return errors
class SerializedYamlToPlistCommand(_LanguageConverter):
    """Convert YAML to PLIST."""

    lang = "plist_language"
    default_lang = "Packages/XML/XML.tmLanguage"

    def get_output_file(self, filename):
        """Derive the PLIST output filename from the YAML source filename."""
        if self.save_binary:
            table_key, target = 'bplist_yaml_conversion_ext', 'bplist'
        else:
            table_key, target = 'plist_yaml_conversion_ext', 'plist'
        # Consult the user-configured extension table first.
        for entry in self.settings.get(table_key, []):
            pattern = "^(.*)\\." + re.escape(entry["yaml"]) + "$"
            match = re.match(pattern, filename, re.IGNORECASE)
            if match is not None:
                return match.group(1) + "." + entry[target]
        # No table entry matched: swap the extension for the default.
        return os.path.splitext(filename)[0] + ".plist"

    def read_source(self):
        """Parse the view buffer as YAML; return True if parsing failed."""
        had_error = False
        try:
            self.yaml = yaml.read_yaml_from_view(self.view)
        except Exception:
            had_error = True
            error_msg(self.errors["view2yaml"], traceback.format_exc())
        return had_error

    def convert(self, edit):
        """Serialize the parsed YAML data as a PLIST; return True on failure."""
        had_error = False
        try:
            # Binary and XML output share every argument except the dumper.
            dump = plist.plist_binary_dumps if self.save_binary else plist.plist_dumps
            self.output = dump(
                self.yaml,
                detect_timestamp=self.settings.get("plist_detect_timestamp", True),
                none_handler=self.settings.get("plist_none_handler", "fail")
            )
            self.yaml = None
        except Exception:
            had_error = True
            error_type = 'yaml2bplist' if self.save_binary else 'yaml2plist'
            error_msg(self.errors[error_type], traceback.format_exc())
        return had_error

    def run(self, edit, **kwargs):
        """Pick the output syntax (binary vs. XML PLIST), then run the conversion."""
        if kwargs.get('save_binary', False):
            self.lang = 'bplist_language'
            self.default_lang = 'Packages/Text/Plain text.tmLanguage'
        else:
            self.lang = 'plist_language'
            self.default_lang = 'Packages/XML/XML.tmLanguage'
        super().run(edit, **kwargs)
##########################
# Plist <-> JSON
##########################
class SerializedPlistToJsonCommand(_LanguageConverter):
    """Convert PLIST to JSON."""

    lang = "json_language"
    default_lang = "Packages/JavaScript/JSON.tmLanguage"

    def get_output_file(self, filename):
        """Map the source filename to a JSON output filename.

        The extension table from settings is consulted first; if no entry
        matches, the current extension is replaced with the default.
        """
        name = None
        if self.binary:
            setting = 'bplist_json_conversion_ext'
            src = 'bplist'
        else:
            setting = 'plist_json_conversion_ext'
            src = 'plist'
        # Try and find file ext in the ext table
        for ext in self.settings.get(setting, []):
            m = re.match("^(.*)\\." + re.escape(ext[src]) + "$", filename, re.IGNORECASE)
            if m is not None:
                name = m.group(1) + "." + ext["json"]
                break
        # Could not find ext in table, replace current extension with default
        if name is None:
            name = os.path.splitext(filename)[0] + ".JSON"
        return name

    def read_source(self):
        """Read the source PLIST; return True when an error occurred."""
        errors = False
        try:
            # Pick the reader that matches where the source actually lives:
            # a hex-encoded view, a binary file on disk, or the view buffer.
            filename = self.view.file_name()
            if self.binary and self.view.encoding() == 'Hexadecimal':
                self.plist = plist.read_plist_from_hex_view(self.view)
            elif self.binary and filename is not None and os.path.exists(filename):
                self.plist = plist.read_plist_from_file(filename)
            else:
                self.plist = plist.read_plist_from_view(self.view)
        except Exception:
            errors = True
            error_type = 'view2bplist' if self.binary else 'view2plist'
            error_msg(self.errors[error_type], traceback.format_exc())
        return errors

    def convert(self, edit):
        """Dump the parsed PLIST data to JSON; return True when an error occurred.

        The previous dead ``if not errors:`` guard (always true at that
        point) has been removed.
        """
        errors = False
        try:
            self.output = json.json_dumps(
                self.plist,
                preserve_binary=self.settings.get("json_preserve_binary_data", True)
            )
            self.plist = None
        except Exception:
            errors = True
            error_type = 'bplist2json' if self.binary else 'plist2json'
            error_msg(self.errors[error_type], traceback.format_exc())
        return errors
class SerializedJsonToPlistCommand(_LanguageConverter):
    """Convert JSON to PLIST."""

    lang = "plist_language"
    default_lang = "Packages/XML/XML.tmLanguage"

    def get_output_file(self, filename):
        """Derive the PLIST output filename from the JSON source filename."""
        if self.save_binary:
            table_key, target = 'bplist_json_conversion_ext', 'bplist'
        else:
            table_key, target = 'plist_json_conversion_ext', 'plist'
        # Consult the user-configured extension table first.
        for entry in self.settings.get(table_key, []):
            pattern = "^(.*)\\." + re.escape(entry["json"]) + "$"
            match = re.match(pattern, filename, re.IGNORECASE)
            if match is not None:
                return match.group(1) + "." + entry[target]
        # No table entry matched: swap the extension for the default.
        return os.path.splitext(filename)[0] + ".plist"

    def read_source(self):
        """Parse the view buffer as JSON; return True if parsing failed."""
        had_error = False
        try:
            self.json = json.read_json_from_view(self.view)
        except Exception:
            had_error = True
            error_msg(self.errors["view2json"], traceback.format_exc())
        return had_error

    def convert(self, edit):
        """Serialize the parsed JSON data as a PLIST; return True on failure."""
        had_error = False
        try:
            # Binary and XML output share every argument except the dumper.
            dump = plist.plist_binary_dumps if self.save_binary else plist.plist_dumps
            self.output = dump(
                self.json,
                detect_timestamp=self.settings.get("plist_detect_timestamp", True),
                none_handler=self.settings.get("plist_none_handler", "fail")
            )
            self.json = None
        except Exception:
            had_error = True
            error_type = 'json2bplist' if self.save_binary else 'json2plist'
            error_msg(self.errors[error_type], traceback.format_exc())
        return had_error

    def run(self, edit, **kwargs):
        """Pick the output syntax (binary vs. XML PLIST), then run the conversion."""
        if kwargs.get('save_binary', False):
            self.lang = 'bplist_language'
            self.default_lang = 'Packages/Text/Plain text.tmLanguage'
        else:
            self.lang = 'plist_language'
            self.default_lang = 'Packages/XML/XML.tmLanguage'
        super().run(edit, **kwargs)
##########################
# YAML <-> JSON
##########################
class SerializedJsonToYamlCommand(_LanguageConverter):
    """Convert JSON to YAML."""

    lang = "yaml_language"
    default_lang = "Packages/YAML/YAML.tmLanguage"

    def get_output_file(self, filename):
        """Map the source filename to a YAML output filename.

        The extension table from settings is consulted first; if no entry
        matches, the current extension is replaced with the default.
        """
        name = None
        # Try and find file ext in the ext table
        for ext in self.settings.get("json_yaml_conversion_ext", []):
            m = re.match("^(.*)\\." + re.escape(ext["json"]) + "$", filename, re.IGNORECASE)
            if m is not None:
                name = m.group(1) + "." + ext["yaml"]
                break
        # Could not find ext in table, replace current extension with default
        if name is None:
            name = os.path.splitext(filename)[0] + ".YAML"
        return name

    def read_source(self):
        """Read the view buffer as JSON; return True when an error occurred."""
        errors = False
        ext_tbl = self.settings.get("yaml_strip_tabs_from", [])
        filename = self.view.file_name()
        # Tab stripping in the YAML dump is enabled per source extension.
        self.strip_tabs = False
        if filename is not None:
            for ext in ext_tbl:
                m = re.match("^(.*)\\." + re.escape(ext) + "$", filename, re.IGNORECASE)
                if m is not None:
                    self.strip_tabs = True
                    break
        try:
            self.json = json.read_json_from_view(self.view)
        except Exception:
            errors = True
            error_msg(self.errors["view2json"], traceback.format_exc())
        return errors

    def convert(self, edit):
        """Dump the parsed JSON data to YAML; return True when an error occurred.

        The previous dead ``if not errors:`` guard (always true at that
        point) has been removed.
        """
        errors = False
        try:
            # Settings store flow style as the strings "true"/"false";
            # anything else leaves the dumper's default in place.
            default_flow_style = None
            flow_setting = self.settings.get("yaml_default_flow_style", None)
            if flow_setting == "true":
                default_flow_style = True
            elif flow_setting == "false":
                default_flow_style = False
            self.output = yaml.yaml_dumps(
                self.json,
                default_flow_style=default_flow_style,
                indent=self.settings.get("yaml_indent", 4),
                strip_tabs=self.strip_tabs,
                detect_timestamp=self.settings.get("yaml_detect_timestamp", True)
            )
            self.json = None
        except Exception:
            errors = True
            error_msg(self.errors["json2yaml"], traceback.format_exc())
        return errors
class SerializedYamlToJsonCommand(_LanguageConverter):
    """Convert YAML to JSON."""

    lang = "json_language"
    default_lang = "Packages/JavaScript/JSON.tmLanguage"

    def get_output_file(self, filename):
        """Derive the JSON output filename from the YAML source filename."""
        # Consult the user-configured extension table first.
        for entry in self.settings.get("json_yaml_conversion_ext", []):
            pattern = "^(.*)\\." + re.escape(entry["yaml"]) + "$"
            match = re.match(pattern, filename, re.IGNORECASE)
            if match is not None:
                return match.group(1) + "." + entry["json"]
        # No table entry matched: swap the extension for the default.
        return os.path.splitext(filename)[0] + ".JSON"

    def read_source(self):
        """Parse the view buffer as YAML; return True if parsing failed."""
        had_error = False
        try:
            self.yaml = yaml.read_yaml_from_view(self.view)
        except Exception:
            had_error = True
            error_msg(self.errors["view2yaml"], traceback.format_exc())
        return had_error

    def convert(self, edit):
        """Serialize the parsed YAML data as JSON; return True on failure."""
        had_error = False
        try:
            self.output = json.json_dumps(
                self.yaml,
                preserve_binary=self.settings.get("json_preserve_binary_data", True)
            )
            self.yaml = None
        except Exception:
            had_error = True
            error_msg(self.errors["yaml2json"], traceback.format_exc())
        return had_error
##########################
# BPLIST <-> PLIST
##########################
class SerializedPlistToPlistCommand(_LanguageConverter):
    """Convert BPLIST <-> PLIST."""
    lang = 'plist_language'
    default_lang = 'Packages/Text/Plain text.tmLanguage'
    def get_output_file(self, filename):
        """Get output filename to save to."""
        name = None
        # Try and find file ext in the ext table
        if self.binary:
            src = 'bplist'
            out = 'plist'
            default_out = '.plist'
        else:
            src = 'plist'
            out = 'bplist'
            # NOTE(review): default_out is '.plist' in both branches.  This may
            # be intentional (binary plists commonly keep the .plist extension)
            # but looks like a copy-paste — confirm before changing.
            default_out = '.plist'
        for ext in self.settings.get('bplist_plist_conversion_ext', []):
            m = re.match("^(.*)\\." + re.escape(ext[src]) + "$", filename, re.IGNORECASE)
            if m is not None:
                name = m.group(1) + "." + ext[out]
                break
        # Could not find ext in table, replace current extension with default
        if name is None:
            name = os.path.splitext(filename)[0] + default_out
        return name
    def read_source(self):
        """Read the source PLIST; returns True when an error occurred."""
        errors = False
        try:
            # Pick the reader that matches where the source actually lives:
            # a hex-encoded view, a binary file on disk, or the view buffer.
            filename = self.view.file_name()
            if self.binary and self.view.encoding() == 'Hexadecimal':
                self.plist = plist.read_plist_from_hex_view(self.view)
            elif self.binary and filename is not None and os.path.exists(filename):
                self.plist = plist.read_plist_from_file(filename)
            else:
                self.plist = plist.read_plist_from_view(self.view)
        except Exception:
            errors = True
            error_type = 'view2bplist' if self.binary else 'view2plist'
            error_msg(self.errors[error_type], traceback.format_exc())
        return errors
    def convert(self, edit):
        """Convert the read data to the desired format; returns True on error."""
        errors = False
        try:
            # Convert Python dict to PLIST buffer (binary or XML form).
            if self.save_binary:
                self.output = plist.plist_binary_dumps(
                    self.plist,
                    detect_timestamp=self.settings.get("plist_detect_timestamp", True),
                    none_handler=self.settings.get("plist_none_handler", "fail")
                )
            else:
                self.output = plist.plist_dumps(
                    self.plist,
                    detect_timestamp=self.settings.get("plist_detect_timestamp", True),
                    none_handler=self.settings.get("plist_none_handler", "fail")
                )
            self.plist = None
        except Exception:
            errors = True
            error_type = "bplist2plist" if self.binary else 'plist2bplist'
            error_msg(self.errors[error_type], traceback.format_exc())
        return errors
    def run(self, edit, **kwargs):
        """Begin conversion, choosing the output syntax by target format."""
        if kwargs.get('save_binary', False):
            self.lang = 'bplist_language'
            self.default_lang = 'Packages/Text/Plain text.tmLanguage'
        else:
            self.lang = 'plist_language'
            self.default_lang = 'Packages/XML/XML.tmLanguage'
        super().run(edit, **kwargs)
| 36.371324 | 117 | 0.553253 | 28,767 | 0.969271 | 0 | 0 | 0 | 0 | 0 | 0 | 8,599 | 0.289733 |
29f5d029a675792751ff0f3ac8e9946cca353e7b | 1,592 | py | Python | test.py | SirNate0/PYrho3D | b0daa3badccd12adfcb9e7cf50d554c805cc6279 | [
"MIT"
] | 6 | 2020-02-20T07:42:07.000Z | 2021-03-27T13:26:47.000Z | test.py | SirNate0/PYrho3D | b0daa3badccd12adfcb9e7cf50d554c805cc6279 | [
"MIT"
] | null | null | null | test.py | SirNate0/PYrho3D | b0daa3badccd12adfcb9e7cf50d554c805cc6279 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2.7
import urho
# Exercise basic object construction through the urho bindings before
# starting the application.
v = urho.Vector3()
c = urho.Context()
fs = urho.FileSystem(c)
from urho import StringHash as sh
import os
print (os.getcwd())
class App(urho.Application):
    # Minimal Urho3D application: sets the window title, then loads and runs
    # the NinjaSnowWar AngelScript sample in Start().
    #def __init__(self, name):
    #    Dog.__init__(self)  # Without this, undefined behavior may occur if the C++ portions are referenced.
    def __init__(self,c):
        urho.Application.__init__(self,c)
#        self.name = name
    #def bark(self):
    #    return "yap!"
    def Setup(self):
        # NOTE(review): the 'applicaiton' typo is inside a runtime string and
        # is left untouched here.
        print 'Setting up the applicaiton'
        self.engineParameters["WindowTitle"] = "PYrho3D"
        return
    def Start(self):
        print 'Starting up the applicaiton'
        # Uses the module-level context `c` rather than self — presumably
        # intentional for this test script; confirm.
        fs = c.GetSubsystem('FileSystem')
        commandFile = fs.GetProgramDir() + "Data/CommandLine.txt"
        print commandFile
#        with open(commandFile) as f:
#            line = commandFile[0]
        scriptfile = 'Scripts/NinjaSnowWar.as'
        c.RegisterSubsystem(urho.Script(c))
        cache = c.GetSubsystem('ResourceCache')
        sf = cache.GetResource('ScriptFile',scriptfile)
        sf.Execute("void Start()")
a = App(c)
#help(a)
var = urho.Variant(u'/home/nathan/Desktop/testClang')
print(var)
print(fs.GetCurrentDir())
#a.engineParameters[urho.StringHash('ResourcePrefixPaths')] = var
#a.engineParameters["FullScreen"] = False
#a.engineParameters[urho.StringHash('FullScreen')] = False
a.engineParameters["WindowWidth"] = 500
c.GetSubsystem(sh('Input')).SetMouseVisible(True)
# NOTE(review): SetMouseVisible is called again after `del fs` below —
# presumably verifying the subsystem survives deletion of the wrapper; confirm.
del fs
c.GetSubsystem(sh('Input')).SetMouseVisible(True)
a.Run()
#ep = a.engineParameters
29f6bfc61051a4c8d3929a3bb610dca313e55859 | 7,696 | py | Python | ajustes_UM/tesis/tesis/settings.py | abelgonzalez/ajustes | f6f99aea18cfb82750805321abfc822d8a6ec5ed | [
"MIT"
] | 1 | 2015-03-04T13:04:33.000Z | 2015-03-04T13:04:33.000Z | ajustes_UM/tesis/tesis/settings.py | abelgonzalez/ajustes | f6f99aea18cfb82750805321abfc822d8a6ec5ed | [
"MIT"
] | null | null | null | ajustes_UM/tesis/tesis/settings.py | abelgonzalez/ajustes | f6f99aea18cfb82750805321abfc822d8a6ec5ed | [
"MIT"
] | null | null | null | """
Django settings for tesis project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# -*- coding: utf-8 -*-
# A tuple that lists people who get code error notifications.
ADMINS = (
    ('Abel González Mondéjar', '[email protected]'),
)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# NOTE(review): global_settings appears unused in this chunk — confirm before removing.
from django.conf import global_settings
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'a6c$xd0y%_#%&ucf!uzu0cuc)6-+b+t5(63u#a__!^3cnhk)#l'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# A boolean that turns on/off template debug mode.
TEMPLATE_DEBUG = True
# A list of strings representing the host/domain names that this Django site can serve.
ALLOWED_HOSTS = []
# Application definition
# A tuple of strings designating all applications that are enabled in this Django installation
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',
    # Other third-party apps
    'pure_pagination',
    'watson',
    # Project apps
    'ajustes',
    'persona',
    'planEstudio',
    # Imported and modified
    'main',
)
PAGINATION_SETTINGS = {
    'PAGE_RANGE_DISPLAYED': 10,
    'MARGIN_PAGES_DISPLAYED': 1,
}
# Middleware is a framework of hooks into Django’s request/response processing.
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# A string representing the full Python import path to your root URLconf.
ROOT_URLCONF = 'tesis.urls'
# The full Python path of the WSGI application object that Django’s built-in servers (e.g. runserver) will use.
WSGI_APPLICATION = 'tesis.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# A dictionary containing the settings for all databases to be used with Django.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'OPTIONS': {
            'read_default_file': os.path.join(BASE_DIR, 'my.cnf'),
            'init_command': 'SET storage_engine=INNODB',
        },
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
# Language code for this installation.
LANGUAGE_CODE = 'es-CU'
# A boolean that specifies whether Django’s translation system should be enabled.
# This provides an easy way to turn it off, for performance. If this is set to False,
# Django will make some optimizations so as not to load the translation machinery.
USE_I18N = True
# A boolean that specifies if localized formatting of data will be enabled by default or not.
# If this is set to True, e.g. Django will display numbers and dates using the format of the current locale.
USE_L10N = True
# A boolean that specifies if datetimes will be timezone-aware by default or not.
# If this is set to True, Django will use timezone-aware datetimes internally.
# Otherwise, Django will use naive datetimes in local time.
USE_TZ = True
# Number representing the first day of the week.
FIRST_DAY_OF_WEEK = 1
from django.utils.translation import ugettext_lazy as _
# A tuple of all available languages.
LANGUAGES = (
    ('es', _('Español')),
    ('en', _('English')),
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
# URL to use when referring to static files located in STATIC_ROOT.
# Example: "http://media.lawrence.com/static/"
# This must be configured similarly to media in order to serve static files.
# It could be something like this commented line:
# STATIC_URL = 'http://localhost:90/static/'
STATIC_URL = '/static/'
# Local time zone for this installation.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Havana'
# List of locations of the template source files searched by django.template.loaders.filesystem.Loader, in search order.
# Note that these paths should use Unix-style forward slashes, even on Windows.
TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), '..', 'templates').replace('\\', '/'),)
# This setting defines the additional locations the staticfiles app will traverse if the FileSystemFinder finder is
# enabled, e.g. if you use the collectstatic or findstatic management command or use the static file serving view.
STATICFILES_DIRS = ((os.path.join(BASE_DIR, 'assets')),
                    (os.path.join(BASE_DIR, 'media')))
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = (os.path.join(BASE_DIR, 'static'))  # Directory collectstatic collects into.
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
# MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'assets/upload')  # commented out
PROJECT_PATH = os.path.dirname(os.path.dirname(__file__))
PROJECT_ROOT = os.path.join("../", PROJECT_PATH)
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media/')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
# Configurar esta línea es importante puede quedar algo así:
# MEDIA_URL = 'http://localhost:90/media/'
# MEDIA_URL = 'http://127.0.0.1:8000/media/' # COMENTADO
# estas las importé también
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# The URL where requests are redirected after login when the contrib.auth.login view gets no next parameter.
LOGIN_REDIRECT_URL = '/'
# The URL where requests are redirected for login, especially when using the login_required() decorator.
LOGIN_URL = '/'
# LOGIN_URL counterpart.
LOGOUT_URL = '/logoutUser'
# TEMPLATE_CONTEXT_PROCESSORS = (
# 'django.contrib.auth.context_processors.auth',
# 'django.core.context_processors.request',
# )
TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS + (
"django.core.context_processors.request",
) | 34.204444 | 120 | 0.721674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6,044 | 0.783917 |
29f700b90ab2377c8ba15763c3022ce5834a7f4f | 4,445 | py | Python | python/fe3lmaker/s3-driver.py | flarebyte/wonderful-bazar | 810514cd7d73505b11d738f8b84d91842d18d074 | [
"MIT"
] | null | null | null | python/fe3lmaker/s3-driver.py | flarebyte/wonderful-bazar | 810514cd7d73505b11d738f8b84d91842d18d074 | [
"MIT"
] | null | null | null | python/fe3lmaker/s3-driver.py | flarebyte/wonderful-bazar | 810514cd7d73505b11d738f8b84d91842d18d074 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# This software code is made available "AS IS" without warranties of any
# kind. You may copy, display, modify and redistribute the software
# code either by itself or as incorporated into your code; provided that
# you do not remove any proprietary notices. Your use of this software
# code is at your own risk and you waive any claim against Amazon
# Digital Services, Inc. or its affiliates with respect to your use of
# this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
# affiliates.
import S3
import time
import sys
AWS_ACCESS_KEY_ID = '<INSERT YOUR AWS ACCESS KEY ID HERE>'
AWS_SECRET_ACCESS_KEY = '<INSERT YOUR AWS SECRET ACCESS KEY HERE>'
# remove these next two lines when you've updated your credentials.
print "update s3-driver.py with your AWS credentials"
sys.exit();
# convert the bucket to lowercase for vanity domains
# the bucket name must be lowercase since DNS is case-insensitive
BUCKET_NAME = AWS_ACCESS_KEY_ID.lower() + '-test-bucket'
KEY_NAME = 'test-key'
conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
generator = S3.QueryStringAuthGenerator(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
# Check if the bucket exists. The high availability engineering of
# Amazon S3 is focused on get, put, list, and delete operations.
# Because bucket operations work against a centralized, global
# resource space, it is not appropriate to make bucket create or
# delete calls on the high availability code path of your application.
# It is better to create or delete buckets in a separate initialization
# or setup routine that you run less often.
if (conn.check_bucket_exists(BUCKET_NAME).status == 200):
print '----- bucket already exists! -----'
else:
print '----- creating bucket -----'
print conn.create_located_bucket(BUCKET_NAME, S3.Location.DEFAULT).message
# to create an EU bucket
#print conn.create_located_bucket(BUCKET_NAME, S3.Location.EU).message
print '----- bucket location -----'
print conn.get_bucket_location(BUCKET_NAME).location
print '----- listing bucket -----'
print map(lambda x: x.key, conn.list_bucket(BUCKET_NAME).entries)
print '----- putting object (with content type) -----'
print conn.put(
BUCKET_NAME,
KEY_NAME,
S3.S3Object('this is a test'),
{ 'Content-Type': 'text/plain' }).message
print '----- listing bucket -----'
print map(lambda x: x.key, conn.list_bucket(BUCKET_NAME).entries)
print '----- getting object -----'
print conn.get(BUCKET_NAME, KEY_NAME).object.data
print '----- query string auth example -----'
print "\nTry this url out in your browser (it will only be valid for 60 seconds).\n"
generator.set_expires_in(60);
url = generator.get(BUCKET_NAME, KEY_NAME)
print url
print '\npress enter> ',
sys.stdin.readline()
print "\nNow try just the url without the query string arguments. it should fail.\n"
print generator.make_bare_url(BUCKET_NAME, KEY_NAME)
print '\npress enter> ',
sys.stdin.readline()
print '----- putting object with metadata and public read acl -----'
print conn.put(
BUCKET_NAME,
KEY_NAME + '-public',
S3.S3Object('this is a publicly readable test'),
{ 'x-amz-acl': 'public-read' , 'Content-Type': 'text/plain' }
).message
print '----- anonymous read test ----'
print "\nYou should be able to try this in your browser\n"
public_key = KEY_NAME + '-public'
print generator.make_bare_url(BUCKET_NAME, public_key)
print "\npress enter> ",
sys.stdin.readline()
print "----- getting object's acl -----"
print conn.get_acl(BUCKET_NAME, KEY_NAME).object.data
print "\n----- path style url example -----";
print "Non-location-constrained buckets can also be specified as part of the url path. (This was the original url style supported by S3.)\n";
print "Try this url out in your browser (it will only be valid for 60 seconds).\n"
generator.calling_format = S3.CallingFormat.PATH
url = generator.get(BUCKET_NAME, KEY_NAME)
print url
print "\npress enter> ",
sys.stdin.readline()
print '----- deleting objects -----'
print conn.delete(BUCKET_NAME, KEY_NAME).message
print conn.delete(BUCKET_NAME, KEY_NAME + '-public').message
print '----- listing bucket -----'
print map(lambda x: x.key, conn.list_bucket(BUCKET_NAME).entries)
print '----- listing all my buckets -----'
print map(lambda x: x.name, conn.list_all_my_buckets().entries)
print '----- deleting bucket ------'
print conn.delete_bucket(BUCKET_NAME).message
| 37.352941 | 142 | 0.730259 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,600 | 0.584927 |
29f709dd701c60c4489620b7e5b46e5aca1a0daf | 7,468 | py | Python | code/lib/models/FCRN_depth.py | santomon/taskonomy | 4b22087a2686172b21b61589831061e7a386fe36 | [
"MIT"
] | 789 | 2018-03-21T05:28:38.000Z | 2022-03-29T19:32:47.000Z | code/lib/models/FCRN_depth.py | santomon/taskonomy | 4b22087a2686172b21b61589831061e7a386fe36 | [
"MIT"
] | 46 | 2018-05-03T07:11:10.000Z | 2022-03-11T23:26:03.000Z | code/lib/models/FCRN_depth.py | santomon/taskonomy | 4b22087a2686172b21b61589831061e7a386fe36 | [
"MIT"
] | 152 | 2018-03-24T10:20:44.000Z | 2022-02-09T02:38:10.000Z |
from __future__ import absolute_import, division, print_function
from models.base_net import BaseNet
import losses.all as losses_lib
import tensorflow as tf
import tensorflow.contrib.slim as slim
import numpy as np
import pdb
import optimizers.train_steps as train_steps
import optimizers.ops as optimize
from functools import partial
import models.fcrn
from models.fcrn import ResNet50UpProj
class FCRN_depth(BaseNet):
'''Standard encoder decoder model
Encodes an input into a low-dimensional representation and reconstructs
the input from the low-dimensional representation. Uses l2 loss.
Assumes inputs are scaled to [0, 1] (which will be rescaled to [-1, 1].
'''
def __init__(self, global_step, cfg):
'''
Args:
cfg: Configuration.
'''
super(FCRN_depth, self).__init__(global_step, cfg)
if 'hidden_size' not in cfg:
raise ValueError( "config.py for encoder-decoder must specify 'hidden_size'" )
#self.ones_mask = self.build_ones_mask()
def build_ones_mask(self):
'''Build a mask of ones which has the same size as the input.
'''
cfg = self.cfg
H, W = cfg['target_dim']
C = cfg['target_num_channels']
batch_size = cfg['batch_size']
mask = tf.constant(1.0, dtype=cfg['target_dtype'], shape=[batch_size, H, W, C],
name='identity_mask')
return mask
def _compute_nnz_mask(self, mask):
'''Compute the number of nonzero elements in a tensor which only
contains elements of 0 or 1 (such as a mask).
'''
return tf.reduce_sum(mask)
def build_model(self, input_imgs, is_training, targets=None, masks=None, privileged_input=None):
'''Builds the model. Assumes that the input is from range [0, 1].
Args:
input_imgs: list of input images (scaled between -1 and 1) with the
dimensions specified in the cfg
is_training: flag for whether the model is in training mode or not
mask: mask used for computing sum of squares loss. If None, we assume
it is np.ones.
'''
print('building model')
cfg = self.cfg
self.is_training = is_training
if masks is None:
masks = tf.constant( 1, dtype=tf.float32, shape=[], name='constant_mask' )
net = ResNet50UpProj({'data': input_imgs}, cfg['batch_size'], 1, False)
decoder_output = net.get_output()
decoder_output = decoder_output * 128.
decoder_output = tf.log(decoder_output + 1.) / 11.090354888959125
# if self.decoder_only:
# encoder_output = input_imgs Assume that the input is the representation
# else:
# encoder_output = self.build_encoder(input_imgs, is_training)
# print("enc:", encoder_output.shape)
# decoder_output = self.build_decoder(encoder_output, is_training)
# print("tar:", targets.shape)
# set up losses
if targets is None:
losses = self.get_losses( decoder_output, input_imgs, masks )
else:
losses = self.get_losses( decoder_output, targets, masks )
# use weight regularization
if 'omit_weight_reg' in cfg and cfg['omit_weight_reg']:
add_reg = False
else:
add_reg = True
# get losses
#regularization_loss = tf.add_n( slim.losses.get_regularization_losses(), name='losses/regularization_loss' )
#total_loss = slim.losses.get_total_loss( add_regularization_losses=add_reg,
# name='losses/total_loss')
self.input_images = input_imgs
self.target_images = targets
self.targets = targets
self.masks = masks
self.decoder_output = decoder_output
self.losses = losses
self.total_loss = losses[0]
# self.init_op = tf.global_variables_initializer()
# add summaries
if self.extended_summaries:
slim.summarize_variables()
slim.summarize_weights()
slim.summarize_biases()
slim.summarize_activations()
slim.summarize_collection(tf.GraphKeys.LOSSES)
#slim.summarize_tensor( regularization_loss )
#slim.summarize_tensor( total_loss )
self.model_built = True
def get_losses( self, output_imgs, desired_imgs, masks ):
'''Returns the loss. May be overridden.
Args:
output_imgs: Tensor of images output by the decoder.
desired_imgs: Tensor of target images to be output by the decoder.
masks: Tensor of masks to be applied when computing sum of squares
loss.
Returns:
losses: list of tensors representing each loss component
'''
print('setting up losses...')
self.output_images = output_imgs
self.target_images = desired_imgs
self.masks = masks
with tf.variable_scope('losses'):
l1_loss = losses_lib.get_l1_loss_with_mask(
self.output_images,
self.target_images,
self.masks,
scope='d1')
losses = [l1_loss]
return losses
def get_classification_loss(self, logits, labels):
with tf.variable_scope('losses'):
classification_loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(# slim.losses.sparse_softmax_cross_entropy(
logits, labels, name='softmax_loss'))
slim.losses.add_loss(classification_loss)
losses = [classification_loss]
return losses
def get_train_step_fn( self ):
'''
Returns:
A train_step funciton which takes args:
(sess, train_ops, global_stepf)
'''
return partial( train_steps.discriminative_train_step_fn,
return_accuracy=False )
def build_train_op( self, global_step ):
'''
Builds train ops for discriminative task
Args:
global_step: A Tensor to be incremented
Returns:
[ loss_op, accuracy ]
'''
if not self.model_built or self.total_loss is None :
raise RuntimeError( "Cannot build optimizers until 'build_model' ({0}) and 'get_losses' {1} are run".format(
self.model_built, self.losses_built ) )
self.global_step = global_step
t_vars = tf.trainable_variables()
# Create the optimizer train_op for the generator
self.optimizer = optimize.build_optimizer( global_step=self.global_step, cfg=self.cfg )
if 'clip_norm' in self.cfg:
self.loss_op = optimize.create_train_op( self.total_loss, self.optimizer, update_global_step=True, clip_gradient_norm=self.cfg['clip_norm'])
else:
if self.is_training:
self.loss_op = optimize.create_train_op( self.total_loss, self.optimizer, update_global_step=True )
else:
self.loss_op = optimize.create_train_op( self.total_loss, self.optimizer, is_training=False, update_global_step=True )
# Create a train_op for the discriminator
self.train_op = [ self.loss_op, 0 ]
self.train_op_built = True
return self.train_op
| 38.494845 | 152 | 0.622657 | 7,067 | 0.946304 | 0 | 0 | 0 | 0 | 0 | 0 | 3,039 | 0.406936 |
29f82c973044d39870f0f41f75666b3782377f54 | 13,459 | py | Python | tests/test_handler.py | Tas-Kit/platform | 34e1abb3f85b9649cbf18496333bf35f74aa6e3d | [
"Apache-2.0"
] | null | null | null | tests/test_handler.py | Tas-Kit/platform | 34e1abb3f85b9649cbf18496333bf35f74aa6e3d | [
"Apache-2.0"
] | null | null | null | tests/test_handler.py | Tas-Kit/platform | 34e1abb3f85b9649cbf18496333bf35f74aa6e3d | [
"Apache-2.0"
] | null | null | null | # trigger build
import json
import uuid
import pytest
from mock import MagicMock, patch
from src import handler, db
from src.models import User, MiniApp, TObject
from src.constants import ROLE
from werkzeug.exceptions import BadRequest
@patch('src.db.push', side_effect=Exception)
def test_execute_obj_post_exception(mock_push):
children = [{
'labels': ['Person', 'Worker'],
'properties': {'age': 10, 'name': 'Owen'}
}, {
'labels': ['Car', 'Tesla'],
'properties': {'age': 3, 'model': 'S'}
}]
user = MagicMock()
obj = MagicMock()
with pytest.raises(Exception):
handler.execute_obj_post(user, obj, ROLE.OWNER, children)
@patch('src.db.push', side_effect=TypeError)
def test_execute_obj_post_error(mock_push):
children = [{
'labels': ['Person', 'Worker'],
'properties': {'age': 10, 'name': 'Owen'}
}, {
'labels': ['Car', 'Tesla'],
'properties': {'age': 3, 'model': 'S'}
}]
user = MagicMock()
obj = MagicMock()
with pytest.raises(BadRequest):
handler.execute_obj_post(user, obj, ROLE.OWNER, children)
@patch('src.handler.serialize_objs')
@patch('src.db.push')
def test_execute_obj_post_success(mock_push, mock_serialize_objs):
children = [{
'labels': ['Person', 'Worker'],
'properties': {'age': 10, 'name': 'Owen'}
}, {
'labels': ['Car', 'Tesla'],
'properties': {'age': 3, 'model': 'S'}
}]
user = MagicMock()
obj = MagicMock()
mock_serialize_objs.return_value = 'result'
assert 'result' == handler.execute_obj_post(user, obj, ROLE.OWNER, children)
mock_serialize_objs.assert_called_once()
args = mock_serialize_objs.call_args_list[0][0]
assert args[0] == user
person = args[1][0]
person_labels = list(person.__node__.labels)
person_labels.remove('TObject')
person_properties = dict(person.__node__)
del person_properties['oid']
assert sorted(person_labels) == sorted(['Person', 'Worker'])
assert person_properties == {'age': 10, 'name': 'Owen'}
car = args[1][1]
car_labels = list(car.__node__.labels)
car_labels.remove('TObject')
car_properties = dict(car.__node__)
del car_properties['oid']
assert sorted(car_labels) == sorted(['Car', 'Tesla'])
assert car_properties == {'age': 3, 'model': 'S'}
assert args[2] == ROLE.OWNER
def test_execute_obj_post_no_permission():
with pytest.raises(BadRequest):
handler.execute_obj_post(MagicMock(), MagicMock(), ROLE.STANDARD, MagicMock())
@patch('src.db.pull')
@patch('src.handler.execute_obj_post')
@patch('src.handler.execute_obj_delete')
def test_execute_obj_replace(mock_execute_obj_delete, mock_execute_obj_post, mock_pull):
user = MagicMock()
obj = MagicMock()
role = ROLE.ADMIN
oid_list = MagicMock()
children = MagicMock()
result = MagicMock()
mock_execute_obj_post.return_value = result
assert result == handler.execute_obj_replace(user, obj, role, oid_list, children)
mock_execute_obj_delete.assert_called_once_with(obj, role, oid_list)
mock_execute_obj_post.assert_called_once_with(user, obj, role, children)
@patch('src.handler.Subgraph')
@patch('src.db.run', side_effect=Exception)
def test_execute_obj_delete_error(mock_run, mock_subgraph):
obj = MagicMock()
child1 = MagicMock()
child2 = MagicMock()
child3 = MagicMock()
child1.oid = 'oid1'
child2.oid = 'oid2'
child3.oid = 'oid3'
child1.__node__ = 'child1'
child2.__node__ = 'child2'
child3.__node__ = 'child3'
node1 = MagicMock()
node2 = MagicMock()
node3 = MagicMock()
node4 = MagicMock()
node5 = MagicMock()
node6 = MagicMock()
node1.__node__ = 'node1'
node2.__node__ = 'node2'
node3.__node__ = 'node3'
node4.__node__ = 'node4'
node5.__node__ = 'node5'
node6.__node__ = 'node6'
child1.get_all_children.return_value = [node1, node2]
child2.get_all_children.return_value = [node3, node4]
child3.get_all_children.return_value = [node5, node6]
obj.children = [child1, child2, child3]
oid_list = ['oid0', 'oid1', 'oid3', 'oid4']
subgraph = MagicMock()
mock_subgraph.return_value = subgraph
with pytest.raises(BadRequest):
handler.execute_obj_delete(obj, ROLE.ADMIN, oid_list)
@patch('src.handler.Subgraph')
@patch('src.db.run')
def test_execute_obj_delete_success(mock_run, mock_subgraph):
obj = MagicMock()
child1 = MagicMock()
child2 = MagicMock()
child3 = MagicMock()
child1.oid = 'oid1'
child2.oid = 'oid2'
child3.oid = 'oid3'
child1.__node__ = 'child1'
child2.__node__ = 'child2'
child3.__node__ = 'child3'
node1 = MagicMock()
node2 = MagicMock()
node3 = MagicMock()
node4 = MagicMock()
node5 = MagicMock()
node6 = MagicMock()
node1.__node__ = 'node1'
node2.__node__ = 'node2'
node3.__node__ = 'node3'
node4.__node__ = 'node4'
node5.__node__ = 'node5'
node6.__node__ = 'node6'
child1.get_all_children.return_value = [node1, node2]
child2.get_all_children.return_value = [node3, node4]
child3.get_all_children.return_value = [node5, node6]
obj.children = [child1, child2, child3]
oid_list = ['oid0', 'oid1', 'oid3', 'oid4']
subgraph = MagicMock()
mock_subgraph.return_value = subgraph
assert 'SUCCESS' == handler.execute_obj_delete(obj, ROLE.ADMIN, oid_list)
mock_run.assert_called_once_with("MATCH (a:TObject)-[*0..]->(x:TObject) WHERE a.oid IN ['oid0', 'oid1', 'oid3', 'oid4'] DETACH DELETE x")
def test_execute_obj_delete_no_permission():
obj = MagicMock()
oid_list = []
with pytest.raises(BadRequest):
handler.execute_obj_delete(obj, ROLE.STANDARD, oid_list)
def test_serialize_objs():
obj1 = MagicMock(oid='oid1')
obj2 = MagicMock(oid='oid2')
obj1.serialize.return_value = 'obj1'
obj2.serialize.return_value = 'obj2'
objs = [obj1, obj2]
user = MagicMock()
assert {'oid1': 'obj1', 'oid2': 'obj2'} == handler.serialize_objs(user, objs, ROLE.ADMIN)
obj1.serialize.assert_called_once_with(user, ROLE.ADMIN)
obj2.serialize.assert_called_once_with(user, ROLE.ADMIN)
@patch('src.handler.get_graph_obj')
def test_get_obj_by_id_get_wrong_obj(mock_get_graph_obj):
user = MagicMock()
obj = MagicMock()
mock_get_graph_obj.return_value = obj
data = {
'_id': 'test_id'
}
with pytest.raises(BadRequest):
handler.get_obj_by_id(user, 'wrong_id', data)
@patch('src.utils.assert_standard')
@patch('src.handler.get_graph_obj')
def test_get_obj_by_id_platform(mock_get_graph_obj, mock_assert_standard):
user = MagicMock()
user.share.get.return_value = 5
obj = MagicMock()
mock_get_graph_obj.return_value = obj
data = {
'_id': 'platform'
}
assert obj is handler.get_obj_by_id(user, 'wrong_id', data)
assert data['role'] == 5
@patch('src.handler.get_graph_obj')
def test_get_obj_by_id_get_obj(mock_get_graph_obj):
user = MagicMock()
obj = MagicMock()
mock_get_graph_obj.return_value = obj
data = {
'_id': 'test_id'
}
assert obj == handler.get_obj_by_id(user, 'test_id', data)
mock_get_graph_obj.assert_called_once_with('test_id', TObject)
@patch('src.handler.get_graph_obj')
def test_get_obj_by_id_get_app(mock_get_graph_obj):
user = MagicMock()
obj = MagicMock()
mock_get_graph_obj.return_value = obj
data = {
'_id': 'test_id'
}
assert obj == handler.get_obj_by_id(user, 'root', data)
mock_get_graph_obj.assert_called_once_with('test_id', MiniApp)
@patch('src.handler.get_graph_obj')
def test_get_mini_apps(mock_get_graph_obj):
user = MagicMock()
app1 = MagicMock()
app2 = MagicMock()
app1.serialize.return_value = 'app1'
app2.serialize.return_value = 'app2'
user.apps = [app1, app2]
mock_get_graph_obj.return_value = user
assert handler.get_mini_apps('test_uid') == {
'mini_apps': ['app1', 'app2']
}
user.verify_key.assert_not_called()
mock_get_graph_obj.assert_called_once_with('test_uid', User)
@patch('src.handler.get_graph_obj')
def test_get_mini_app(mock_get_graph_obj):
user = MagicMock()
app = MagicMock()
app.serialize.return_value = 'mock_app'
mock_get_graph_obj.side_effect = [user, app]
assert handler.get_mini_app('test_uid', 'test_aid', 'test_platform_root_key') == {
'mini_app': 'mock_app'
}
assert mock_get_graph_obj.call_count == 2
user.verify_key.assert_called_once_with('test_platform_root_key')
@patch('src.handler.get_graph_obj')
def test_get_platform_root_key(mock_get_graph_obj):
user = MagicMock()
mock_get_graph_obj.return_value = user
user.generate_platform_root_key.return_value = 'platform_root_key'
assert handler.get_platform_root_key('test_uid') == {
'platform_root_key': 'platform_root_key'
}
mock_get_graph_obj.assert_called_once_with('test_uid', User)
def test_get_graph_obj_not_exist():
with pytest.raises(BadRequest):
handler.get_graph_obj('none existing aid', MiniApp)
def test_get_graph_obj_user_not_exist():
uid = str(uuid.uuid4())
u = handler.get_graph_obj(uid, User)
assert u.uid == uid
db.delete(u)
def test_get_graph_obj_exist():
app = MiniApp()
aid = str(uuid.uuid4())
app.aid = aid
db.push(app)
db.pull(app)
assert app == handler.get_graph_obj(aid, MiniApp)
db.delete(app)
@patch('src.handler.serialize_objs', return_value='serialize_results')
@patch('src.handler.handle_obj_params')
def test_handle_obj_get(mock_handle_obj_params, mock_serialize_objs):
parser = MagicMock()
user = MagicMock()
obj = MagicMock()
obj.children = ['test1', 'test2']
mock_handle_obj_params.return_value = {
'user': user,
'obj': obj,
'role': ROLE.ADMIN
}
assert {'result': 'serialize_results'} == handler.handle_obj_get('test_oid', parser)
mock_handle_obj_params.assert_called_once_with('test_oid', parser)
mock_serialize_objs.assert_called_once_with(user, obj.children, ROLE.ADMIN)
def test_decorator():
def dec(func):
def wrapper(a, b):
return func(a + b)
return wrapper
@dec
def main(foo):
return foo
assert 6 == main(5, 1)
def test_extra_params():
params = {
'user': 'u',
'app': 'a'
}
def func(user, **kwargs):
return user
assert 'u' == func(**params)
@patch('src.handler.get_obj_by_id')
@patch('src.handler.get_graph_obj')
def test_handle_obj_params(mock_get_graph_obj,
mock_get_obj_by_id):
user = MagicMock(spec=User)
data = {
'uid': 'test_uid',
'_id': 'test_oid',
'role': ROLE.OWNER,
'exp': 123456
}
obj = MagicMock()
mock_get_graph_obj.return_value = user
user.verify_key.return_value = data
mock_get_obj_by_id.return_value = obj
oid_list = ['oid1', 'oid2']
children = [
{
'labels': ['People', 'Worker'],
'properties': {
'name': 'Owen',
'age': '22'
}
}
]
parser = MagicMock()
parser.parse_args.return_value = {
'uid': 'test_uid',
'key': 'test_key',
'oid_list': oid_list,
'children': children
}
params = handler.handle_obj_params('test_oid', parser)
mock_get_graph_obj.assert_called_once_with('test_uid', User)
assert params == {
'user': user,
'obj': obj,
'role': ROLE.OWNER,
'oid_list': ['oid1', 'oid2'],
'children': children
}
@patch('src.db.push')
def test_execute_obj_patch_update(mock_push):
target_user = MagicMock()
target_user.share.get.return_value = 0
assert handler.execute_obj_patch(MagicMock(), 10, target_user, 5) == 'SUCCESS'
target_user.share.update.assert_called_once()
@patch('src.db.push')
def test_execute_obj_patch_remove(mock_push):
target_user = MagicMock()
target_user.share.get.return_value = 0
assert handler.execute_obj_patch(MagicMock(), 10, target_user, -1) == 'SUCCESS'
target_user.share.remove.assert_called_once()
@patch('src.db.push')
def test_execute_obj_patch_no_enough_permission(mock_push):
target_user = MagicMock()
target_user.share = MagicMock()
target_user.share.get.return_value = 5
with pytest.raises(BadRequest):
handler.execute_obj_patch(MagicMock(), 5, target_user, 0) == 'SUCCESS'
def test_handle_obj_patch_root():
with pytest.raises(BadRequest):
handler.handle_obj_patch('root', '')
@patch('src.handler.get_obj_by_id', return_value='obj')
@patch('src.handler.get_graph_obj')
@patch('src.handler.execute_obj_patch', return_value='hello')
def test_handle_obj_patch(mock_execute_obj_patch, mock_get_graph_obj, mock_get_obj_by_id):
user1 = MagicMock()
user1.verify_key.return_value = {
'role': 5
}
user2 = MagicMock()
mock_get_graph_obj.side_effect = [user1, user2]
arg_parser = MagicMock()
arg_parser.parse_args.return_value = {
'uid': 'myuid',
'key': 'mykey',
'target_uid': 'mytarget_uid',
'target_role': 0
}
assert handler.handle_obj_patch('oid', arg_parser) == {
'result': 'hello'
}
mock_execute_obj_patch.assert_called_once_with(
obj='obj', role=5, target_user=user2, target_role=0)
| 31.155093 | 141 | 0.666691 | 0 | 0 | 0 | 0 | 11,434 | 0.849543 | 0 | 0 | 2,310 | 0.171632 |
29f8d1a4f8b0cea46b5286a6c9367ca7d6ae25dc | 579 | py | Python | ersilia/utils/identifiers/long.py | ersilia-os/ersilia | eded117d6c7029ce4a497effdb514c21edfe3673 | [
"MIT"
] | 32 | 2020-07-30T20:31:05.000Z | 2022-03-31T17:27:14.000Z | ersilia/utils/identifiers/long.py | ersilia-os/ersilia | eded117d6c7029ce4a497effdb514c21edfe3673 | [
"MIT"
] | 59 | 2022-03-21T10:00:04.000Z | 2022-03-31T23:03:14.000Z | ersilia/utils/identifiers/long.py | ersilia-os/ersilia | eded117d6c7029ce4a497effdb514c21edfe3673 | [
"MIT"
] | 44 | 2022-03-17T13:11:07.000Z | 2022-03-31T19:44:16.000Z | try:
import uuid
except ModuleNotFoundError as err:
uuid = None
ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
PATTERN = [8, 4, 4, 4, 12]
SEP = "-"
class LongIdentifier(object):
def __init__(self):
super().__init__()
@staticmethod
def encode():
"""Get UUID code (long identifier)"""
if uuid is None:
alphabet = ALPHABET.lower()
for n in PATTERN:
s += ["".join([random.choice(alphabet) for _ in range(n)])]
return "-".join(s)
else:
return str(uuid.uuid4())
| 23.16 | 75 | 0.56304 | 416 | 0.71848 | 0 | 0 | 330 | 0.569948 | 0 | 0 | 83 | 0.143351 |
29f8e7164c007819cd3fd8ace5f9b2b1776fa29b | 19,622 | py | Python | client/buck_project_builder/tests/builder_test.py | aspin/pyre-check | fe78b41789ba3ef091b0b021d9a1c6267905a7f8 | [
"MIT"
] | null | null | null | client/buck_project_builder/tests/builder_test.py | aspin/pyre-check | fe78b41789ba3ef091b0b021d9a1c6267905a7f8 | [
"MIT"
] | null | null | null | client/buck_project_builder/tests/builder_test.py | aspin/pyre-check | fe78b41789ba3ef091b0b021d9a1c6267905a7f8 | [
"MIT"
] | null | null | null | # Copyright (c) 2019-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import glob
import unittest
from typing import List, Optional
from unittest.mock import MagicMock, patch
from .. import BuilderException, FastBuckBuilder, Target, parser
from ..build_target import (
BuildTarget,
PythonBinary,
PythonLibrary,
PythonWheel,
ThriftLibrary,
)
from ..filesystem import Sources
from .test_common import base
class BuilderTest(unittest.TestCase):
def assert_target_names_equal(self, list_a: List[str], list_b: List[str]) -> None:
self.assertListEqual(sorted(list_a), sorted(list_b))
def assert_targets_equal(
self, targets: List[BuildTarget], target_names: List[str]
) -> None:
self.assert_target_names_equal(
[target.target for target in targets], target_names
)
def assert_raises_builder_exception(self, function, *args, expected_targets=None):
try:
function(*args)
except BuilderException as error:
self.assert_target_names_equal(error.targets, expected_targets)
else:
self.fail("Expected BuilderException to be thrown.")
def test_parse_target(self):
builder = FastBuckBuilder("/ROOT")
self.assertEqual(builder._parse_target("//a:b"), Target("a", "b"))
self.assert_raises_builder_exception(
builder._parse_target, "//a:", expected_targets=["//a:"]
)
self.assert_raises_builder_exception(
builder._parse_target, "//a/...", expected_targets=["//a/..."]
)
def test_compute_targets_to_build_simple(self):
# Dependency graph:
# a
# / \
# b <- c
# | /
# d e
build_file = MagicMock()
build_file.targets = {
"a": PythonBinary(
"/ROOT", "project", base("a", ["//project:b", "//project:c"])
),
"b": PythonLibrary("/ROOT", "project", base("b", ["//project:d"])),
"c": PythonLibrary(
"/ROOT", "project", base("c", ["//project:b", "//project:d"])
),
"d": PythonLibrary("/ROOT", "project", base("d")),
"e": PythonLibrary("/ROOT", "project", base("e")),
}
with patch.object(parser.Parser, "parse_file", return_value=build_file):
builder = FastBuckBuilder("/ROOT")
targets = builder.compute_targets_to_build(["//project:a"])
self.assert_targets_equal(
targets, ["//project:a", "//project:b", "//project:c", "//project:d"]
)
targets = builder.compute_targets_to_build(["//project:b"])
self.assert_targets_equal(targets, ["//project:b", "//project:d"])
targets = builder.compute_targets_to_build(["//project:c"])
self.assert_targets_equal(
targets, ["//project:b", "//project:c", "//project:d"]
)
targets = builder.compute_targets_to_build(["//project:d"])
self.assert_targets_equal(targets, ["//project:d"])
targets = builder.compute_targets_to_build(["//project:e"])
self.assert_targets_equal(targets, ["//project:e"])
targets = builder.compute_targets_to_build(["//project:a", "//project:e"])
self.assert_targets_equal(
targets,
[
"//project:a",
"//project:b",
"//project:c",
"//project:d",
"//project:e",
],
)
self.assert_raises_builder_exception(
builder.compute_targets_to_build,
["//project:e", "//project:f", "//project:g"],
expected_targets=["//project:f", "//project:g"],
)
builder = FastBuckBuilder("/ROOT", fail_on_unbuilt_target=False)
targets = builder.compute_targets_to_build(
["//project:e", "//project:f", "//project:g"]
)
self.assert_targets_equal(targets, ["//project:e"])
    def test_compute_targets_to_build_complex(self):
        """Transitive closure when dependencies span two build files."""
        # Dependency graph:
        #    a
        #   / \
        #  b   c
        #  |   |
        #  d <- e
        build_file_1 = MagicMock()
        build_file_1.targets = {
            "a": PythonBinary(
                "/ROOT", "project1", base("a", ["//project1:b", "//project2:c"])
            ),
            "b": PythonLibrary("/ROOT", "project1", base("b", ["//project2:d"])),
        }
        build_file_2 = MagicMock()
        build_file_2.targets = {
            "c": PythonLibrary("/ROOT", "project2", base("c", ["//project2:e"])),
            "d": PythonLibrary("/ROOT", "project2", base("d")),
            "e": PythonLibrary("/ROOT", "project2", base("e", ["//project2:d"])),
        }
        # parse_file is looked up by relative build-file directory.
        build_file_mapping = {"project1": build_file_1, "project2": build_file_2}
        with patch.object(
            parser.Parser, "parse_file", side_effect=build_file_mapping.get
        ):
            builder = FastBuckBuilder("/ROOT")
            targets = builder.compute_targets_to_build(["//project1:a"])
            self.assert_targets_equal(
                targets,
                [
                    "//project1:a",
                    "//project1:b",
                    "//project2:c",
                    "//project2:d",
                    "//project2:e",
                ],
            )
            targets = builder.compute_targets_to_build(["//project1:b"])
            self.assert_targets_equal(targets, ["//project1:b", "//project2:d"])
            targets = builder.compute_targets_to_build(["//project2:c"])
            self.assert_targets_equal(
                targets, ["//project2:c", "//project2:e", "//project2:d"]
            )
            targets = builder.compute_targets_to_build(["//project2:d"])
            self.assert_targets_equal(targets, ["//project2:d"])
            targets = builder.compute_targets_to_build(["//project2:e"])
            self.assert_targets_equal(targets, ["//project2:e", "//project2:d"])
            # A target missing from its build file raises, naming the target.
            self.assert_raises_builder_exception(
                builder.compute_targets_to_build,
                ["//project1:f"],
                expected_targets=["//project1:f"],
            )
    def test_targets_to_build_file_wildcard(self):
        """A trailing-colon target ("//path:") expands to every target defined
        in that one build file, plus transitive dependencies.
        """
        # Dependency graph:
        #  a -> c -> d <- e    b
        build_file_1 = MagicMock()
        build_file_1.targets = {
            "a": PythonBinary("/ROOT", "project1", base("a", ["//project2:c"])),
            "b": PythonLibrary("/ROOT", "project1", base("b")),
        }
        build_file_2 = MagicMock()
        build_file_2.targets = {
            "c": PythonLibrary("/ROOT", "project2", base("c", ["//project2:d"])),
            "d": PythonLibrary("/ROOT", "project2", base("d")),
            "e": PythonLibrary("/ROOT", "project2", base("e", ["//project2:d"])),
        }
        build_file_mapping = {"project1": build_file_1, "project2": build_file_2}
        with patch.object(
            parser.Parser, "parse_file", side_effect=build_file_mapping.get
        ):
            builder = FastBuckBuilder("/ROOT")
            # Wildcard pulls in project1's targets and their dependencies,
            # but NOT project2's unrelated target e.
            targets = builder.compute_targets_to_build(["//project1:"])
            self.assert_targets_equal(
                targets,
                ["//project1:a", "//project1:b", "//project2:c", "//project2:d"],
            )
            targets = builder.compute_targets_to_build(["//project2:"])
            self.assert_targets_equal(
                targets, ["//project2:c", "//project2:d", "//project2:e"]
            )
            # Multiple wildcards union their expansions.
            targets = builder.compute_targets_to_build(["//project1:", "//project2:"])
            self.assert_targets_equal(
                targets,
                [
                    "//project1:a",
                    "//project1:b",
                    "//project2:c",
                    "//project2:d",
                    "//project2:e",
                ],
            )
    def test_targets_to_build_directory_wildcard(self):
        """A "//path/..." target expands recursively: the builder globs for
        TARGETS files under the directory (glob.iglob is patched to simulate
        the filesystem) and builds everything found, plus dependencies.
        """
        # Dependency graph:
        #  a -> c    d    b <- e
        build_file_1 = MagicMock()
        build_file_1.targets = {
            "a": PythonBinary(
                "/ROOT", "project1", base("a", ["//project1/subproject:c"])
            ),
            "b": PythonLibrary("/ROOT", "project1", base("b")),
        }
        build_file_2 = MagicMock()
        build_file_2.targets = {
            "c": PythonLibrary("/ROOT", "project1/subproject", base("c")),
            "d": PythonLibrary("/ROOT", "project1/subproject", base("d")),
        }
        build_file_3 = MagicMock()
        build_file_3.targets = {
            "e": PythonLibrary("/ROOT", "project2", base("e", ["//project1:b"]))
        }
        build_file_mapping = {
            "project1": build_file_1,
            "project1/subproject": build_file_2,
            "project2": build_file_3,
        }
        with patch.object(
            parser.Parser, "parse_file", side_effect=build_file_mapping.get
        ):
            builder = FastBuckBuilder("/ROOT")
            # "//..." covers the whole repository.
            with patch.object(
                glob,
                "iglob",
                return_value=[
                    "/ROOT/project1/TARGETS",
                    "/ROOT/project1/subproject/TARGETS",
                    "/ROOT/project2/TARGETS",
                ],
            ):
                targets = builder.compute_targets_to_build(["//..."])
                self.assert_targets_equal(
                    targets,
                    [
                        "//project1:a",
                        "//project1:b",
                        "//project1/subproject:c",
                        "//project1/subproject:d",
                        "//project2:e",
                    ],
                )
            # "//project1/..." covers project1 and its subdirectories only.
            with patch.object(
                glob,
                "iglob",
                return_value=[
                    "/ROOT/project1/TARGETS",
                    "/ROOT/project1/subproject/TARGETS",
                ],
            ):
                targets = builder.compute_targets_to_build(["//project1/..."])
                self.assert_targets_equal(
                    targets,
                    [
                        "//project1:a",
                        "//project1:b",
                        "//project1/subproject:c",
                        "//project1/subproject:d",
                    ],
                )
            with patch.object(
                glob, "iglob", return_value=["/ROOT/project1/subproject/TARGETS"]
            ):
                targets = builder.compute_targets_to_build(
                    ["//project1/subproject/..."]
                )
                self.assert_targets_equal(
                    targets, ["//project1/subproject:c", "//project1/subproject:d"]
                )
            # Dependencies may reach OUTSIDE the wildcard directory
            # (project2:e depends on project1:b).
            with patch.object(glob, "iglob", return_value=["/ROOT/project2/TARGETS"]):
                targets = builder.compute_targets_to_build(["//project2/..."])
                self.assert_targets_equal(targets, ["//project2:e", "//project1:b"])
def test_compute_targets_to_build_duplicates(self):
# Dependency graph:
# a
# / \
# b-py c-py
# |
# b
build_file = MagicMock()
thrift_target = ThriftLibrary(
"/ROOT", "project", base("b"), ["b.thrift"], False
)
build_file.targets = {
"a": PythonBinary(
"/ROOT",
"project",
base("a", dependencies=["//project:b-py", "//project:c-py"]),
),
"b": thrift_target,
"b-py": thrift_target,
"c-py": ThriftLibrary(
"/ROOT",
"project",
base("c", dependencies=["//project:b"]),
["c.thrift"],
False,
),
}
with patch.object(parser.Parser, "parse_file", return_value=build_file):
builder = FastBuckBuilder("/ROOT")
# b and b-py refer to the same build target; we should only build it once.
targets = builder.compute_targets_to_build(["//project:a"])
self.assert_targets_equal(
targets, ["//project:a", "//project:b", "//project:c"]
)
def test_targets_to_build_wheels(self):
build_file_1 = MagicMock()
build_file_1.targets = {
"a": PythonBinary(
"/ROOT", "project1", base("a", ["//project2/wheel:wheel"])
)
}
build_file_2 = MagicMock()
build_file_2.targets = {
"wheel": PythonWheel("/ROOT", "project2/wheel", base("wheel"), {}, {})
}
build_file_mapping = {"project1": build_file_1, "project2/wheel": build_file_2}
with patch.object(
parser.Parser, "parse_file", side_effect=build_file_mapping.get
):
builder = FastBuckBuilder("/ROOT")
targets = builder.compute_targets_to_build(["//project1:a"])
self.assert_targets_equal(
targets, ["//project1:a", "//project2/wheel:wheel"]
)
targets = builder.compute_targets_to_build(["//project2/wheel:wheel"])
self.assert_targets_equal(targets, ["//project2/wheel:wheel"])
def test_compute_reverse_dependencies(self):
# Dependency graph:
# a
# / \
# b <- c
# | /
# d e
builder = FastBuckBuilder("/ROOT")
a = PythonBinary("/ROOT", "project", base("a", ["//project:b", "//project:c"]))
b = PythonLibrary("/ROOT", "project", base("b", ["//project:d"]))
c = PythonLibrary("/ROOT", "project", base("c", ["//project:b", "//project:d"]))
d = PythonLibrary("/ROOT", "project", base("d"))
e = PythonLibrary("/ROOT", "project", base("e"))
targets = [a, b, c, d, e]
reverse_dependencies = builder.compute_reverse_dependencies(targets)
self.assertDictEqual(
dict(reverse_dependencies),
{"//project:b": [a, c], "//project:c": [a], "//project:d": [b, c]},
)
self.assertEqual(reverse_dependencies["//project:a"], [])
self.assertEqual(reverse_dependencies["//project:e"], [])
    def test_normalize_targets(self):
        """_normalize_target expands a single target expression: plain names
        pass through, "//path:" expands one build file, "//path/..." globs for
        TARGETS files recursively (glob.iglob is patched and its call pattern
        is asserted).
        """
        build_file_1 = MagicMock()
        build_file_1.targets = {
            "a": PythonLibrary("/ROOT", "project1", base("a")),
            "b": PythonLibrary("/ROOT", "project1", base("b")),
            "c": PythonLibrary("/ROOT", "project1", base("c")),
        }
        build_file_2 = MagicMock()
        build_file_2.targets = {
            "d": PythonLibrary("/ROOT", "project1/subproject", base("d")),
            "e": PythonLibrary("/ROOT", "project1/subproject", base("e")),
        }
        build_file_3 = MagicMock()
        build_file_3.targets = {"f": PythonLibrary("/ROOT", "project2", base("f"))}
        build_file_mapping = {
            "project1": build_file_1,
            "project1/subproject": build_file_2,
            "project2": build_file_3,
        }
        with patch.object(
            parser.Parser, "parse_file", side_effect=build_file_mapping.get
        ):
            builder = FastBuckBuilder("/ROOT")
            # Regular targets
            normalized_targets = builder._normalize_target("//project1:a")
            self.assert_target_names_equal(normalized_targets, ["//project1:a"])
            # File wildcard targets
            normalized_targets = builder._normalize_target("//project1:")
            self.assert_target_names_equal(
                normalized_targets, ["//project1:a", "//project1:b", "//project1:c"]
            )
            normalized_targets = builder._normalize_target("//project1/subproject:")
            self.assert_target_names_equal(
                normalized_targets,
                ["//project1/subproject:d", "//project1/subproject:e"],
            )
            normalized_targets = builder._normalize_target("//project2:")
            self.assert_target_names_equal(normalized_targets, ["//project2:f"])
            # Directory wildcard targets: each case also checks the exact
            # glob pattern used to discover TARGETS files.
            with patch.object(
                glob,
                "iglob",
                return_value=[
                    "/ROOT/project1/TARGETS",
                    "/ROOT/project1/subproject/TARGETS",
                    "/ROOT/project2/TARGETS",
                ],
            ) as fake_iglob:
                normalized_targets = builder._normalize_target("//...")
                self.assert_target_names_equal(
                    normalized_targets,
                    [
                        "//project1:a",
                        "//project1:b",
                        "//project1:c",
                        "//project1/subproject:d",
                        "//project1/subproject:e",
                        "//project2:f",
                    ],
                )
                fake_iglob.assert_called_once_with("/ROOT/**/TARGETS", recursive=True)
            with patch.object(
                glob,
                "iglob",
                return_value=[
                    "/ROOT/project1/TARGETS",
                    "/ROOT/project1/subproject/TARGETS",
                ],
            ) as fake_iglob:
                normalized_targets = builder._normalize_target("//project1/...")
                self.assert_target_names_equal(
                    normalized_targets,
                    [
                        "//project1:a",
                        "//project1:b",
                        "//project1:c",
                        "//project1/subproject:d",
                        "//project1/subproject:e",
                    ],
                )
                fake_iglob.assert_called_once_with(
                    "/ROOT/project1/**/TARGETS", recursive=True
                )
            with patch.object(
                glob, "iglob", return_value=["/ROOT/project1/subproject/TARGETS"]
            ) as fake_iglob:
                normalized_targets = builder._normalize_target(
                    "//project1/subproject/..."
                )
                self.assert_target_names_equal(
                    normalized_targets,
                    ["//project1/subproject:d", "//project1/subproject:e"],
                )
                fake_iglob.assert_called_once_with(
                    "/ROOT/project1/subproject/**/TARGETS", recursive=True
                )
            with patch.object(
                glob, "iglob", return_value=["/ROOT/project2/TARGETS"]
            ) as fake_iglob:
                normalized_targets = builder._normalize_target("//project2/...")
                self.assert_target_names_equal(normalized_targets, ["//project2:f"])
                fake_iglob.assert_called_once_with(
                    "/ROOT/project2/**/TARGETS", recursive=True
                )
def test_build(self):
with patch.object(
FastBuckBuilder, "compute_targets_to_build"
) as compute_targets_to_build:
fake_targets = [MagicMock(), MagicMock(), MagicMock()]
compute_targets_to_build.return_value = fake_targets
builder = FastBuckBuilder("/ROOT", output_directory="/output")
builder.build(["//target:"])
for fake_target in fake_targets:
fake_target.build.assert_called_once_with("/output")
| 37.590038 | 88 | 0.505045 | 19,092 | 0.97299 | 0 | 0 | 0 | 0 | 0 | 0 | 4,838 | 0.24656 |
29f94d2b334b89e0c508fee4d9e22209246bc128 | 5,970 | py | Python | api/user.py | gfoo/fastapi-demo | 44ceb9e94fa833841756136c3b446f192a311dde | [
"Unlicense"
] | null | null | null | api/user.py | gfoo/fastapi-demo | 44ceb9e94fa833841756136c3b446f192a311dde | [
"Unlicense"
] | null | null | null | api/user.py | gfoo/fastapi-demo | 44ceb9e94fa833841756136c3b446f192a311dde | [
"Unlicense"
] | null | null | null | from time import time
from typing import List
from core.security import verify_password
from db import users as DBUsers
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.responses import JSONResponse
from models.user import DBUser
from schemas.user import (UserCreate, UserUpdateActivate, UserUpdatePassword,
UserUpdateSuperuser, UserView)
from sqlalchemy.orm import Session
from .deps import get_current_active_superuser, get_current_active_user, get_db
# All endpoints below are mounted under /users and grouped under the
# "users" tag in the generated OpenAPI documentation.
router = APIRouter(
    prefix='/users',
    tags=['users']
)
@router.get('/', response_model=List[UserView])
def get_all_users(skip: int = 0, limit: int = 100,
                  db: Session = Depends(get_db),
                  current_user: DBUser = Depends(get_current_active_superuser)):
    """
    Retrieve users, paginated via ``skip``/``limit``.

    The ``current_user`` dependency enforces that the caller is an active
    superuser.
    """
    users = DBUsers.get_users(db, skip=skip, limit=limit)
    return users
@router.get("/me", response_model=UserView)
def get_user(db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_user)):
"""
Retrieve my user.
"""
return current_user
@router.get("/{user_id}", response_model=UserView)
def get_user(user_id: int, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_user)):
"""
Retrieve a user (only itself if not enough privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user == current_user:
return db_user
if not current_user.is_superuser:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="The user does not have enough privileges"
)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
return db_user
@router.post("/{user_id}/reset_password", response_model=UserView)
def update_user_password_reset(
user_id: int, user_passwords: UserUpdatePassword, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Update any user password (require superuser privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
DBUsers.update_user_password(
db=db, user_id=user_id, new_password=user_passwords.new_password)
return db_user
@router.post("/{user_id}/activate", response_model=UserView)
def update_user_activate(
user_id: int, user_activate: UserUpdateActivate, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Update any user activation (require superuser privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
DBUsers.update_user_activate(
db=db, user_id=user_id, activate=user_activate.activate)
return db_user
@router.post("/{user_id}/superuser", response_model=UserView)
def update_user_activate(
user_id: int, user_superuser: UserUpdateSuperuser, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Update any user privileges (require superuser privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
DBUsers.update_user_superuser(
db=db, user_id=user_id, superuser=user_superuser.superuser)
return db_user
@router.post("/{user_id}/password", response_model=UserView)
def update_user_password(
user_id: int, user_passwords: UserUpdatePassword, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_user)):
"""
Update personal user password (require previous password).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
if db_user != current_user:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Can only update its own password"
)
if user_passwords.old_password == user_passwords.new_password:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="New password cannot be the same as the old one")
if not verify_password(user_passwords.old_password, db_user.password):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Incorrect old password")
DBUsers.update_user_password(
db=db, user_id=user_id, new_password=user_passwords.new_password)
return db_user
@router.post("/", response_model=UserView)
def create_user(user: UserCreate, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Create a user.
"""
db_user = DBUsers.get_user_by_email(db, email=user.email)
if db_user:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered")
return DBUsers.create_user(db=db, user=user)
@router.delete("/{user_id}", response_class=JSONResponse)
def delete_user(user_id: int, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Delete a user (require superuser privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
DBUsers.delete_user(db=db, user_id=user_id)
return JSONResponse(content={"status": "ok", "user_id": user_id})
| 36.402439 | 109 | 0.701675 | 0 | 0 | 0 | 0 | 5,372 | 0.899832 | 0 | 0 | 952 | 0.159464 |
29f9eab4a69842a784121a1073e07bcadc752ced | 3,265 | py | Python | Realsense2CV.py | felix2072/pytorch-CycleGAN-and-pix2pix | 4980106ceab5e1eb7bb20c2b492d007b6310d9e1 | [
"BSD-3-Clause"
] | null | null | null | Realsense2CV.py | felix2072/pytorch-CycleGAN-and-pix2pix | 4980106ceab5e1eb7bb20c2b492d007b6310d9e1 | [
"BSD-3-Clause"
] | null | null | null | Realsense2CV.py | felix2072/pytorch-CycleGAN-and-pix2pix | 4980106ceab5e1eb7bb20c2b492d007b6310d9e1 | [
"BSD-3-Clause"
] | null | null | null | ## License: Apache 2.0. See LICENSE file in root directory.
## Copyright(c) 2015-2017 Intel Corporation. All Rights Reserved.
###############################################
## Open CV and Numpy integration ##
###############################################
import pyrealsense2 as rs
import numpy as np
import cv2
def auto_canny(image, sigma=0.33):
    """Run Canny edge detection with thresholds derived from the image.

    The lower/upper thresholds are (1 -/+ sigma) times the median pixel
    intensity, clamped to the valid [0, 255] range, so no manual tuning
    is needed per image.
    """
    median_intensity = np.median(image)
    lower_threshold = int(max(0, (1.0 - sigma) * median_intensity))
    upper_threshold = int(min(255, (1.0 + sigma) * median_intensity))
    return cv2.Canny(image, lower_threshold, upper_threshold)
# Configure depth and color streams
pipeline = rs.pipeline()
config = rs.config()
# Get device product line for setting a supporting resolution
pipeline_wrapper = rs.pipeline_wrapper(pipeline)
pipeline_profile = config.resolve(pipeline_wrapper)
device = pipeline_profile.get_device()
device_product_line = str(device.get_info(rs.camera_info.product_line))
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
width = 640
height = 480
# L500-series devices do not support 640x480 colour; use 960x540 instead.
if device_product_line == 'L500':
    config.enable_stream(rs.stream.color, 960, 540, rs.format.bgr8, 30)
else:
    config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)
# Start streaming
pipeline.start(config)
# NOTE(review): the four constants below are never used in this script --
# presumably leftovers from an interactive-threshold version.
max_lowThreshold = 100
window_name = 'Edge Map'
title_trackbar = 'Min Threshold:'
ratio = 3
kernel_size = 3
try:
    while True:
        # Wait for a coherent pair of frames: depth and color
        frames = pipeline.wait_for_frames()
        depth_frame = frames.get_depth_frame()
        color_frame = frames.get_color_frame()
        if not depth_frame or not color_frame:
            continue
        # Convert images to numpy arrays
        object_color = np.zeros((height, width, 3), np.uint8)
        depth_image = np.asanyarray(depth_frame.get_data())
        color_image = np.asanyarray(color_frame.get_data())
        # depth_image_rgb = cv2.merge((depth_image,depth_image,depth_image))
        # Apply colormap on depth image (image must be converted to 8-bit per pixel first)
        # depth_colormap = cv2.applyColorMap(cv2.convertScaleAbs(depth_image, alpha=0.03), cv2.COLORMAP_JET)
        # depth_colormap_dim = depth_colormap.shape
        color_colormap_dim = color_image.shape
        # Resize depth to match the colour resolution (L500 colour differs).
        depth_image = cv2.resize(depth_image, (width, height), interpolation=cv2.INTER_AREA)
        edges = auto_canny(color_image)
        #edges = cv2.bitwise_not(edges)
        # NOTE(review): dead assignment -- overwritten on the next line.
        edges_rgb = object_color.shape
        edges_rgb = cv2.merge((edges,edges,edges))
        #blank_image[5:10 , 5:10] = (255, 0, 0)  # [x.1,x.2 , y.1,y.2] (B, G, R)
        # NOTE(review): numpy indexes [row, col], i.e. [0:height, 0:width].
        # With width=640 on a 480-row array this slice clamps to all rows but
        # only the first 480 columns, leaving the right edge uncoloured --
        # confirm whether [0:height, 0:width] (or simply [:]) was intended.
        object_color[0:width, 0:height] = (76, 76, 76)
        image = cv2.add(edges_rgb,object_color)
        edges_rgb = cv2.bitwise_not(edges_rgb)
        # scale = 0.003922 is ~1/255: normalises the 0/255 edge mask so the
        # multiply acts as a per-pixel mask.
        image = cv2.multiply(edges_rgb,image,scale = 0.003922)
        # Crop to the 256x256 top-left corner for display.
        image = image[0:256, 0:256]
        # Show images
        cv2.namedWindow('RealSense', cv2.WINDOW_AUTOSIZE)
        cv2.imshow('RealSense', image)
        cv2.waitKey(1)
finally:
    # Stop streaming
    pipeline.stop()
29faa4ea69ec98280ad24b2003914856eee015a8 | 12,800 | py | Python | governor/postgresql.py | billcap/governor | 0056ec15d973d24f36688783b415fe894ca94db7 | [
"MIT"
] | null | null | null | governor/postgresql.py | billcap/governor | 0056ec15d973d24f36688783b415fe894ca94db7 | [
"MIT"
] | null | null | null | governor/postgresql.py | billcap/governor | 0056ec15d973d24f36688783b415fe894ca94db7 | [
"MIT"
] | null | null | null | import logging
import os
import psycopg2
import time
import shlex
import subprocess
import shutil
import threading
from urllib.parse import urlparse
logger = logging.getLogger(__name__)
class Postgresql:
    """Manages one local PostgreSQL instance for the governor HA loop:
    provisioning (initdb / pg_basebackup), start/stop/promote lifecycle,
    health checks, recovery.conf handling and physical replication slots.
    """

    # libpq options applied to every connection this class opens; the
    # statement_timeout keeps monitoring queries from blocking the loop.
    CONN_OPTIONS = {
        'connect_timeout': 3,
        'options': '-c statement_timeout=2000',
    }

    # Lazily-created psycopg2 connection and cursor (see connection()).
    _conn = None
    _cursor_holder = None

    def __init__(self, config, psql_config):
        """``config`` is the governor configuration object; ``psql_config``
        is a list of extra command-line options appended verbatim to the
        ``postgres`` invocation in start_threaded().
        """
        self.config = config
        self.psql_config = psql_config
        self.name = config.name
        # listen_address has the form "host:port".
        self.listen_addresses, self.port = config.listen_address.split(':')
        self.data_dir = config.data_dir
        self.recovery_conf = os.path.join(self.data_dir, 'recovery.conf')
        self.pid_path = os.path.join(self.data_dir, 'postmaster.pid')
        # Base pg_ctl invocation; -w waits for the operation to finish.
        self._pg_ctl = ('pg_ctl', '-w', '-D', self.data_dir)
        self.members = set()  # list of already existing replication slots
        self.promoted = False
    def parseurl(self, url):
        """Turn a bare "host:port" member value into psycopg2 connection
        kwargs for the replication user, with CONN_OPTIONS merged in.
        """
        r = urlparse('postgres://' + url)
        options = {
            'host': r.hostname,
            'port': r.port or 5432,
            'user': self.config.repl_user,
            'password': self.config.repl_password,
            'database': self.config.dbname,
            'fallback_application_name': 'Governor',
        }
        options.update(self.CONN_OPTIONS)
        return options
    def pg_ctl(self, *args, **kwargs):
        """Run ``pg_ctl <args>`` against this data directory and return the
        subprocess exit code (0 on success).
        """
        cmd = self._pg_ctl + args
        logger.info(cmd)
        return subprocess.call(cmd, **kwargs)
    def connection(self):
        """Return a cached autocommit connection to the local instance,
        (re)connecting if the previous one was closed.
        """
        if not self._conn or self._conn.closed:
            self._conn = psycopg2.connect(
                dbname=self.config.dbname,
                port=self.port,
                user=self.config.user,
                password=self.config.password,
                **self.CONN_OPTIONS
            )
            self._conn.autocommit = True
        return self._conn
    def _cursor(self):
        # Reuse one cursor per connection; recreated when closed.
        if not self._cursor_holder or self._cursor_holder.closed:
            self._cursor_holder = self.connection().cursor()
        return self._cursor_holder
    def disconnect(self):
        """Close and forget the cached connection and cursor."""
        if self._conn:
            self._conn.close()
        self._conn = self._cursor_holder = None
    def query(self, sql, *params):
        """Execute ``sql`` with up to 3 attempts, reconnecting (after a 5s
        pause) on interface/operational errors.  An OperationalError with the
        connection still open is raised immediately (a real server error, not
        a broken connection).  Returns the live cursor.
        """
        max_attempts = 3
        for i in range(max_attempts):
            ex = None
            try:
                cursor = self._cursor()
                cursor.execute(sql, params)
                return cursor
            except psycopg2.InterfaceError as e:
                ex = e
            except psycopg2.OperationalError as e:
                if self._conn and self._conn.closed == 0:
                    raise e
                ex = e
            self.disconnect()
            time.sleep(5)
        if ex:
            raise ex
    def data_directory_empty(self):
        """True when the data directory is missing or has no entries."""
        return not (os.path.exists(self.data_dir) and os.listdir(self.data_dir))
    def initialize(self):
        """Create a fresh cluster with initdb and write pg_hba.conf.
        Returns True on success.
        """
        if subprocess.call(['initdb', '-D', self.data_dir, '--encoding', 'UTF-8']) == 0:
            self.write_pg_hba()
            return True
        return False
    def sync_from_leader(self, leader):
        """Clone the leader's data directory with pg_basebackup (-R writes a
        recovery.conf).  The replication password, if any, is passed via a
        0600 pgpass file referenced through PGPASSFILE.  Returns True on
        success.
        """
        r = self.parseurl(leader.value)
        env = os.environ.copy()
        if r['password'] is not None:
            pgpass = os.path.join(os.environ['ROOT'], 'pgpass')
            with open(pgpass, 'w') as f:
                os.fchmod(f.fileno(), 0o600)
                f.write('{host}:{port}:*:{user}:{password}\n'.format(**r))
            env['PGPASSFILE'] = pgpass
        try:
            subprocess.check_call([
                'pg_basebackup', '-R', '-P', '-w',
                '-D', self.data_dir,
                '--host', r['host'],
                '--port', str(r['port']),
                '-U', self.config.repl_user,
            ], env=env)
        except subprocess.CalledProcessError:
            return False
        finally:
            # pg_basebackup may leave looser permissions; postgres requires 0700.
            os.chmod(self.data_dir, 0o700)
        return True
    def is_leader(self):
        """True when the instance is not in recovery; also clears the
        ``promoted`` flag once leadership is confirmed.
        """
        is_leader = not self.query('SELECT pg_is_in_recovery()').fetchone()[0]
        if is_leader:
            self.promoted = False
        return is_leader
    def is_running(self):
        """True when ``pg_ctl status`` reports a running server."""
        return self.pg_ctl('status', stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) == 0
    def start_threaded(self):
        """Run ``postgres`` in the foreground (from a daemon thread) and
        forward its combined stdout/stderr to the log, line by line.
        """
        # NOTE(review): this named logger is created but never used -- the
        # loop below logs through the root logger (logging.info).  Confirm
        # whether logger.info(line) was intended.
        logger = logging.getLogger('postgres')
        cmd = [
            'postgres', '-i',
            '-p', self.port,
            '-h', self.listen_addresses,
            '-D', self.data_dir,
        ] + self.psql_config
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
        while True:
            line = proc.stdout.readline()
            if not line:
                break
            logging.info(line)
    def start(self):
        """Launch postgres in a background daemon thread.  Refuses (returns
        False) when an instance is already running; removes a stale pid file
        first.  Returns True once the thread is started (not once the server
        is accepting connections).
        """
        if self.is_running():
            self.load_replication_slots()
            logger.error('Cannot start PostgreSQL because one is already running.')
            return False
        if os.path.exists(self.pid_path):
            os.remove(self.pid_path)
            logger.info('Removed %s', self.pid_path)
        self.disconnect()
        thread = threading.Thread(target=self.start_threaded)
        thread.daemon = True
        thread.start()
        return True
    def stop(self):
        """Fast-shutdown the server.
        NOTE(review): returns True when pg_ctl exits NON-zero (failure) --
        the inverse of reload()/restart().  Confirm callers expect this
        before changing it.
        """
        self.disconnect()
        return self.pg_ctl('stop', '-m', 'fast') != 0
    def reload(self):
        """Signal the server to re-read its configuration files."""
        return self.pg_ctl('reload') == 0
    def restart(self):
        """Fast restart; True on success."""
        self.disconnect()
        return self.pg_ctl('restart', '-m', 'fast') == 0
    def is_healthy(self):
        """Cheap liveness check: just verifies the server process runs."""
        if not self.is_running():
            logger.warning('Postgresql is not running.')
            return False
        return True
    def is_healthiest_node(self, cluster):
        """Decide whether this replica is the best failover candidate: it must
        not lag the recorded leader optime by more than maximum_lag, and no
        other reachable member may be ahead of it in xlog replay.  Unreachable
        members are skipped.
        """
        if self.is_leader():
            return True
        if int(cluster.optime.value) - self.xlog_position() > self.config.maximum_lag:
            return False
        for name, m in cluster.members.items():
            if name == self.name:
                continue
            try:
                member_conn = psycopg2.connect(**self.parseurl(m.value))
                member_conn.autocommit = True
                member_cursor = member_conn.cursor()
                # Ask the peer: is it in recovery, and our position minus its
                # replay position (negative => the peer is ahead of us).
                member_cursor.execute(
                    "SELECT pg_is_in_recovery(), %s - (pg_last_xlog_replay_location() - '0/0000000'::pg_lsn)",
                    (self.xlog_position(), ))
                row = member_cursor.fetchone()
                member_cursor.close()
                member_conn.close()
                # NOTE(review): diagnostic output logged at ERROR level --
                # presumably leftover debugging; confirm intended severity.
                logger.error([self.name, name, row])
                if not row[0] or row[1] < 0:
                    return False
            except psycopg2.Error:
                continue
        return True
    def write_pg_hba(self):
        """Write pg_hba.conf entries for the client and replication users over
        the configured subnets, using md5 auth when a password is set and
        trust (with a warning) otherwise.
        """
        if self.config.password:
            method = 'md5'
        else:
            logger.warning('No password specified')
            method = 'trust'
        hba = ['local all all trust']
        for subnet in self.config.allow_address.split():
            hba.append(' '.join(['host', self.config.dbname, self.config.user, subnet, method]))
        if self.config.repl_password:
            method = 'md5'
        else:
            logger.warning('No replication password specified')
            method = 'trust'
        for subnet in self.config.repl_allow_address.split():
            hba.append(' '.join(['host', 'replication', self.config.repl_user, subnet, method]))
        config = ConfigFile(os.path.join(self.data_dir, 'pg_hba.conf'))
        config.write_config(*hba)
    def primary_conninfo(self, leader_url):
        """Build the primary_conninfo value pointing at the leader, including
        the replication password when configured.
        """
        r = self.parseurl(leader_url)
        values = ['{}={}'.format(k, r[k]) for k in ['user', 'host', 'port']]
        if r['password'] is not None:
            values.append('password={}'.format(r['password']))
        return '{} sslmode=prefer sslcompression=1'.format(' '.join(values))
    def check_recovery_conf(self, leader):
        """True when recovery.conf already matches the given leader: its
        primary_conninfo (minus the surrounding quotes) equals the expected
        value, or neither a leader nor a primary_conninfo entry exists.
        """
        if not os.path.isfile(self.recovery_conf):
            return False
        pattern = (leader and self.primary_conninfo(leader.value))
        for key, value in RecoveryConf(self.recovery_conf).load_config():
            if key == 'primary_conninfo':
                if not pattern:
                    return False
                # value is quoted in the file; strip the surrounding quotes.
                return value[1:-1] == pattern
        return not pattern
    def write_recovery_conf(self, leader):
        """(Re)write recovery.conf: always standby_mode/latest timeline, plus
        slot name and primary_conninfo when a leader is known.  With no
        leader the file is truncated to just the standby settings.
        """
        contents = [
            ('standby_mode', 'on'),
            ('recovery_target_timeline', 'latest'),
        ]
        if leader:
            contents.append(('primary_slot_name', self.name))
            contents.append(('primary_conninfo', self.primary_conninfo(leader.value)))
        config = RecoveryConf(self.recovery_conf)
        config.write_config(*contents, truncate = not leader)
    def follow_the_leader(self, leader):
        """Point recovery.conf at ``leader`` and restart, unless the file is
        already correct.
        """
        if not self.check_recovery_conf(leader):
            self.write_recovery_conf(leader)
            self.restart()
    def promote(self):
        """Promote the replica to leader; remembers success in ``promoted``."""
        self.promoted = (self.pg_ctl('promote') == 0)
        return self.promoted
    def create_users(self):
        """Create (or alter, for the built-in postgres role) the client
        superuser and the replication user.
        """
        op = ('ALTER' if self.config.user == 'postgres' else 'CREATE')
        query = '{} USER "{}" WITH {}'.format
        # normal client user
        self.create_user(query(op, self.config.user, 'SUPERUSER'), self.config.password)
        # replication user
        self.create_user(query('CREATE', self.config.repl_user, 'REPLICATION'), self.config.repl_password)
    def create_user(self, query, password):
        """Run a CREATE/ALTER USER statement, appending an encrypted password
        clause when one is configured.
        """
        if password:
            return self.query(query + ' ENCRYPTED PASSWORD %s', password)
        return self.query(query)
    def xlog_position(self):
        """Current xlog position as an integer byte offset: replay location
        when in recovery, write location when leader.
        """
        return self.query("""SELECT CASE WHEN pg_is_in_recovery()
                             THEN pg_last_xlog_replay_location() - '0/0000000'::pg_lsn
                             ELSE pg_current_xlog_location() - '0/00000'::pg_lsn END""").fetchone()[0]
    def load_replication_slots(self):
        """Refresh ``self.members`` with the physical slots that exist."""
        cursor = self.query("SELECT slot_name FROM pg_replication_slots WHERE slot_type='physical'")
        self.members = set(r[0] for r in cursor)
    def sync_replication_slots(self, members):
        """Reconcile physical replication slots with the given member names:
        drop slots for departed members, create slots for new ones (a slot is
        never kept/created for this node itself).  The WHERE [NOT] EXISTS
        guards make both operations idempotent.
        """
        members = set(name for name in members if name != self.name)
        # drop unused slots
        for slot in self.members - members:
            self.query("""SELECT pg_drop_replication_slot(%s)
                           WHERE EXISTS(SELECT 1 FROM pg_replication_slots
                           WHERE slot_name = %s)""", slot, slot)
        # create new slots
        for slot in members - self.members:
            self.query("""SELECT pg_create_physical_replication_slot(%s)
                           WHERE NOT EXISTS (SELECT 1 FROM pg_replication_slots
                           WHERE slot_name = %s)""", slot, slot)
        self.members = members
    def create_replication_slots(self, cluster):
        """Ensure a slot exists for every other cluster member."""
        self.sync_replication_slots([name for name in cluster.members if name != self.name])
    def drop_replication_slots(self):
        """Drop every slot this class manages (e.g. after demotion)."""
        self.sync_replication_slots([])
    def last_operation(self):
        # Alias used by the HA loop when publishing the leader optime.
        return self.xlog_position()
class ConfigFile:
    """A config file managed next to a pristine ``<path>.backup`` copy.

    On first construction the pre-existing file (if any) is moved aside as
    the backup; otherwise an empty backup is created.  ``write_config``
    can restore the backup and then append lines not already present.
    """

    __slots__ = ('path',)

    def __init__(self, path):
        self.path = path
        backup = self.path + '.backup'
        if not os.path.exists(backup):
            if os.path.exists(self.path):
                os.rename(self.path, backup)
            else:
                with open(backup, 'w'): pass

    def reload_backup(self):
        """Overwrite the live file with the pristine backup copy."""
        shutil.copy(self.path + '.backup', self.path)

    def load_config(self):
        """Yield the file's raw lines (trailing newline included), skipping
        comment lines that start with '#'.
        """
        with open(self.path) as file:
            for line in file:
                if not line.startswith('#'):
                    yield line

    def write_config(self, *lines, reload=True, check_duplicates=True, truncate=False):
        """Append ``lines`` to the file (or rewrite it when ``truncate``),
        optionally restoring the backup first and skipping lines that
        already exist in the file.
        """
        if reload:
            self.reload_backup()
        if check_duplicates:
            # BUG FIX: load_config() yields raw lines that still carry their
            # trailing '\n', while callers pass bare strings, so a direct
            # membership test never matched and duplicates were re-appended
            # on every call.  Normalize both sides before comparing.
            config = {line.rstrip('\n') for line in self.load_config()}
        else:
            config = ()
        mode = ('w' if truncate else 'a')
        with open(self.path, mode) as file:
            for l in lines:
                if l.rstrip('\n') not in config:
                    file.write('\n' + l)
            file.write('\n')
class RecoveryConf(ConfigFile):
    """A recovery.conf file: ``key = 'value'`` pairs layered on ConfigFile."""

    def load_config(self):
        # Split each raw line into a (key, still-quoted value) tuple.
        for raw in super().load_config():
            key, _, value = raw.strip().partition(' = ')
            yield (key, value)

    def write_config(self, *args, reload=True, check_duplicates=True, **kwargs):
        if reload:
            self.reload_backup()
        known = {pair[0] for pair in self.load_config()} if check_duplicates else ()
        # Quote values and drop keys the file already defines; the base class
        # gets pre-formatted lines with its own checks disabled.
        formatted = ("{} = '{}'".format(key, value)
                     for key, value in args if key not in known)
        return super().write_config(*formatted, reload=False,
                                    check_duplicates=False, **kwargs)
| 33.952255 | 114 | 0.563984 | 12,606 | 0.984844 | 308 | 0.024063 | 0 | 0 | 0 | 0 | 1,959 | 0.153047 |
29fbb43e9c43f01cd5a84414b7fa4416473edd33 | 566 | py | Python | main.py | Benrflanders/Genetic-Algorithm-Function-Solver | 7234aed5478d0701f0f8ce342116ac154aa40ba1 | [
"MIT"
] | null | null | null | main.py | Benrflanders/Genetic-Algorithm-Function-Solver | 7234aed5478d0701f0f8ce342116ac154aa40ba1 | [
"MIT"
] | null | null | null | main.py | Benrflanders/Genetic-Algorithm-Function-Solver | 7234aed5478d0701f0f8ce342116ac154aa40ba1 | [
"MIT"
] | null | null | null | import genetic_algorithm
#where the population will be processed and the main loop is contained
#initialise population with random candidate solutions
print("Enter a function to be solved: \n")
fitness_function = [1780, 17, -2] #n = ax + by
#function: [n, a, b]
ga = genetic_algorithm.genetic_algorithm(fitness_function)
#evaluate each candidate
#repeat until (termination condition is satifsfied ) DO
#select parents;
#recombine pairs of parents
#mutate the resulting offspring
#evaluate new candidates
#select individuals for the next generation
#OD
#END
| 21.769231 | 70 | 0.780919 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 418 | 0.738516 |
29fda9d9b2256b8b4efc118aa8ea61e7cbc1a09c | 264 | py | Python | thirdparty/flask/template/macro_demo.py | gwaysoft/python | a74a0b553dfca9606083a41ab6d03801e67d2467 | [
"Apache-2.0"
] | null | null | null | thirdparty/flask/template/macro_demo.py | gwaysoft/python | a74a0b553dfca9606083a41ab6d03801e67d2467 | [
"Apache-2.0"
] | null | null | null | thirdparty/flask/template/macro_demo.py | gwaysoft/python | a74a0b553dfca9606083a41ab6d03801e67d2467 | [
"Apache-2.0"
] | null | null | null | from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
return render_template("macro.html", type="text", value="from endpoint")
if __name__ == '__main__':
print(app.url_map)
app.run(debug=True, host="0.0.0.0")
| 18.857143 | 76 | 0.674242 | 0 | 0 | 0 | 0 | 105 | 0.397727 | 0 | 0 | 55 | 0.208333 |
29fdda258cbe5d54b3217108b57775e883bf274f | 549 | py | Python | fishpass/migrations/0004_auto_20180925_1825.py | Ecotrust/FishPass | a69a4f9de46f28653ae92ef33c1e5cf7036cfb37 | [
"MIT"
] | 3 | 2019-03-01T04:00:21.000Z | 2022-02-10T22:17:20.000Z | fishpass/migrations/0004_auto_20180925_1825.py | Ecotrust/FishPass | a69a4f9de46f28653ae92ef33c1e5cf7036cfb37 | [
"MIT"
] | 165 | 2018-04-13T18:24:39.000Z | 2022-03-02T03:27:33.000Z | fishpass/migrations/0004_auto_20180925_1825.py | Ecotrust/FishPass | a69a4f9de46f28653ae92ef33c1e5cf7036cfb37 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-26 01:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration: redeclares Project.focus_region as a
    ForeignKey to fishpass.FocusArea with CASCADE deletion."""
    dependencies = [
        ('fishpass', '0003_auto_20180925_1825'),
    ]
    operations = [
        # AlterField only changes the column definition; no data migration.
        migrations.AlterField(
            model_name='project',
            name='focus_region',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fishpass.FocusArea'),
        ),
    ]
| 24.954545 | 106 | 0.657559 | 357 | 0.650273 | 0 | 0 | 0 | 0 | 0 | 0 | 150 | 0.273224 |
29fded4c87d470f4257846244ccbee2b48588393 | 8,956 | py | Python | backend/account/migrations/0001_initial.py | CS178A-B/final-project-bjls | aebb8042f2d958caac00e31b27b445b9079901d0 | [
"MIT"
] | null | null | null | backend/account/migrations/0001_initial.py | CS178A-B/final-project-bjls | aebb8042f2d958caac00e31b27b445b9079901d0 | [
"MIT"
] | 20 | 2020-10-21T19:16:15.000Z | 2021-09-03T05:48:20.000Z | backend/account/migrations/0001_initial.py | CS178A-B/R-Finder | aebb8042f2d958caac00e31b27b445b9079901d0 | [
"MIT"
] | 1 | 2020-10-22T04:49:45.000Z | 2020-10-22T04:49:45.000Z |
# Generated by Django 2.2.13 on 2021-03-10 21:33
import account.models
import datetime
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial, auto-generated schema for the `account` app.

    Creates the custom User model (with is_student/is_faculty flags) plus the
    Course, Faculty, Student, Job, Comment, Application and StudentCourse
    tables, then adds the many-to-many / foreign-key relations once both
    endpoints exist.
    """
    initial = True
    dependencies = [
        ('auth', '0011_update_proxy_permissions'),
    ]
    operations = [
        # Custom user model replacing django.contrib.auth's default user.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                # NOTE(review): verbose_name is set to the model classes
                # account.models.Student / account.models.Faculty rather than
                # a string — looks like a generator artifact; confirm intent.
                ('is_student', models.BooleanField(default=False, verbose_name=account.models.Student)),
                ('is_faculty', models.BooleanField(default=False, verbose_name=account.models.Faculty)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # Domain tables.
        migrations.CreateModel(
            name='Course',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('description', models.CharField(max_length=150)),
                ('abbrev', models.CharField(max_length=50)),
                ('grade', models.CharField(blank=True, default='', max_length=3, null=True)),
            ],
        ),
        # Faculty/Student are one-to-one profile extensions of User.
        migrations.CreateModel(
            name='Faculty',
            fields=[
                ('department', models.CharField(default='', max_length=50)),
                ('profile_completeness', models.IntegerField(default=0)),
                ('user', models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                ('major', models.CharField(default='', max_length=50)),
                ('GPA', models.FloatField(blank=True, default=0, null=True)),
                ('profile_completeness', models.IntegerField(default=0)),
                ('resume_pdf', models.FileField(blank=True, null=True, upload_to='pdf')),
                ('transcript', models.FileField(blank=True, null=True, upload_to='pdf')),
                ('user', models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=150)),
                ('posted_date', models.DateField(verbose_name=datetime.date(2021, 3, 10))),
                ('hourly_salary', models.FloatField(blank=True, default=10, max_length=10)),
                ('hours_per_week', models.IntegerField(default=10)),
                ('course_req', models.ManyToManyField(blank=True, default=0, to='account.Course')),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.CharField(max_length=1500)),
                ('course', models.ManyToManyField(blank=True, default=0, to='account.Course')),
            ],
        ),
        # Application and StudentCourse are the explicit through-tables used
        # by the ManyToManyFields added below.
        migrations.CreateModel(
            name='Application',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('application_date', models.DateField(verbose_name=datetime.date(2021, 3, 10))),
                ('applicant_score', models.IntegerField(default=0)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Job')),
            ],
        ),
        migrations.CreateModel(
            name='StudentCourse',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('grade', models.CharField(default='', max_length=50)),
                ('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Course')),
                ('student', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Student')),
            ],
        ),
        # Relation fields are added after both endpoint models exist.
        migrations.AddField(
            model_name='student',
            name='applications',
            field=models.ManyToManyField(blank=True, default=0, through='account.Application', to='account.Job'),
        ),
        migrations.AddField(
            model_name='student',
            name='comments_recv',
            field=models.ManyToManyField(blank=True, default=0, to='account.Comment'),
        ),
        migrations.AddField(
            model_name='student',
            name='course_taken',
            field=models.ManyToManyField(blank=True, default=0, through='account.StudentCourse', to='account.Course'),
        ),
        migrations.AddField(
            model_name='job',
            name='applications',
            field=models.ManyToManyField(blank=True, default=0, through='account.Application', to='account.Student'),
        ),
        migrations.AddField(
            model_name='job',
            name='poster',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Faculty'),
        ),
        migrations.AddField(
            model_name='faculty',
            name='comments_made',
            field=models.ManyToManyField(blank=True, default=0, to='account.Comment'),
        ),
        migrations.AddField(
            model_name='faculty',
            name='courses_taught',
            field=models.ManyToManyField(blank=True, default=0, to='account.Course'),
        ),
        migrations.AddField(
            model_name='course',
            name='students',
            field=models.ManyToManyField(blank=True, default=0, through='account.StudentCourse', to='account.Student'),
        ),
        migrations.AddField(
            model_name='comment',
            name='commenter',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Faculty'),
        ),
        migrations.AddField(
            model_name='application',
            name='student',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Student'),
        ),
    ]
| 50.03352 | 329 | 0.61054 | 8,655 | 0.966391 | 0 | 0 | 0 | 0 | 0 | 0 | 1,991 | 0.222309 |
29ffd5d34e2555908d5acb7cecdc5aad3a6e87bc | 1,983 | py | Python | src/predictionAlgorithms/machineLearning/training/convolutionalLstm.py | aivaras-ciurlionis/meteo | 434759d16f7cca505d280475611d1fef5176827b | [
"MIT"
] | null | null | null | src/predictionAlgorithms/machineLearning/training/convolutionalLstm.py | aivaras-ciurlionis/meteo | 434759d16f7cca505d280475611d1fef5176827b | [
"MIT"
] | 6 | 2020-05-23T11:30:48.000Z | 2022-03-11T23:45:06.000Z | src/predictionAlgorithms/machineLearning/training/convolutionalLstm.py | aivaras-ciurlionis/meteo | 434759d16f7cca505d280475611d1fef5176827b | [
"MIT"
] | null | null | null | import tensorflow
from PIL import Image
from keras.models import Sequential
from keras.layers import Conv2D, Conv2DTranspose, ConvLSTM2D
from keras.optimizers import SGD
import numpy as np
import os
from keras import backend as K
from src.predictionAlgorithms.machineLearning.algorithms.ConvLSTM import ConvLstm
from src.predictionAlgorithms.machineLearning.algorithms.ConvolutionalChannelsMovementAlgorithm import \
ConvolutionalChannelsMovementAlgorithm
from src.predictionAlgorithms.machineLearning.helpers.callbacks import Callbacks
from src.utilities.imageAnalysis.pixelsRainStrengthConverter import PixelsRainStrengthConverter
class ConvolutionalLstmTrain:
    """Builds, trains and saves the ConvLSTM prediction network."""
    @staticmethod
    def train(size, channels, validation_data, loader, val):
        """Train the ConvLSTM2D + Conv2D model and save it to 'conv_lstm.h5'.

        :param size: height/width of the (square) input frames
        :param channels: number of time steps per input sample
        :param validation_data: data handed to the project's Callbacks helper
        :param loader: zero-argument callable returning the training generator
        :param val: validation data passed to fit_generator
        """
        model = Sequential()
        # ConvLSTM layer consumes the (channels, 1, size, size) sequence and
        # emits a single feature map (return_sequences=False).
        model.add(
            ConvLSTM2D(
                filters=1,
                padding='same',
                kernel_size=(6, 6),
                activation='relu',
                input_shape=(channels, 1, size, size),
                data_format='channels_first',
                return_sequences=False
            )
        )
        # Final convolution produces the single-channel output frame.
        model.add(
            Conv2D(
                filters=1,
                kernel_size=(8, 8),
                activation='relu',
                padding='same',
                data_format='channels_first'
            )
        )
        model.compile(
            optimizer=SGD(lr=0.01, decay=0.01/50),  # decay sized for the 50 epochs below
            loss='mse'
        )
        # Per-epoch validation is driven through the project's Callbacks helper.
        callback = Callbacks()
        callback \
            .set_algorithm(ConvLstm(model=model).with_size(size)) \
            .set_validation_data(validation_data) \
            .set_size(size) \
            .set_validation_frequency(1) \
            .set_base(6)
        model.fit_generator(loader(), epochs=50, steps_per_epoch=20, shuffle=True, callbacks=[callback],
                            validation_data=val)
        model.save('conv_lstm.h5')
# K: 12x12 -> lr: 0.01 -> E = 50; SpE = 10 | 34.189655 | 104 | 0.611699 | 1,299 | 0.655068 | 0 | 0 | 1,264 | 0.637418 | 0 | 0 | 117 | 0.059002 |
4b00cec2aa25b2e4c87f0a86c86662d5e0d2edb1 | 1,927 | py | Python | batchtest.py | nachewigkeit/CropDefender | e78fc48f720367ca94033f6263eb1e4a9c6b7858 | [
"MIT"
] | 2 | 2021-10-14T08:14:15.000Z | 2021-12-01T05:57:49.000Z | batchtest.py | nachewigkeit/CropDefender | e78fc48f720367ca94033f6263eb1e4a9c6b7858 | [
"MIT"
] | null | null | null | batchtest.py | nachewigkeit/CropDefender | e78fc48f720367ca94033f6263eb1e4a9c6b7858 | [
"MIT"
] | 1 | 2021-12-01T05:57:53.000Z | 2021-12-01T05:57:53.000Z | import bchlib
from PIL import Image, ImageOps
import numpy as np
import glob
from tqdm import tqdm
import torch
import matplotlib.pyplot as plt
from model import StegaStampDecoder
BCH_POLYNOMIAL = 137
BCH_BITS = 5
def get_bits(secret="MITPBL"):
    # Input: a string; output: its BCH-encoded packet as a '0'/'1' bit string.
    """Space-pad `secret` to 7 bytes, BCH-encode it, and return data+ECC as a
    bit string (presumably 96 bits, matching decode()'s secret[:96] — confirm
    against the bchlib parameters)."""
    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)
    data = bytearray(secret + ' ' * (7 - len(secret)), 'utf-8')
    ecc = bch.encode(data)
    packet = data + ecc
    packet_binary = ''.join(format(x, '08b') for x in packet)  # 8 bits per byte, MSB first
    return packet_binary
def get_model(model_path):
    """Load a fully-serialized decoder from `model_path` and move it to the GPU."""
    # Input: path to the saved model file; output: the CUDA-resident model.
    return torch.load(model_path).cuda()
def decode(image, model):
    # Input: a model and an image; output: the predicted bit string.
    # (The image MUST already be normalized to the 0-1 range!)
    """Run `model` on one HxWxC float image and return the first 96 outputs
    as a '0'/'1' string."""
    # HWC -> CHW, add a batch dimension, move to GPU.
    image = torch.from_numpy(image.transpose((2, 0, 1))).unsqueeze(0).cuda()
    secret = model(image)
    secret = np.array(secret[0].cpu())
    secret = np.round(secret)  # round each output to the nearest integer (0/1)
    packet_binary = "".join([str(int(bit)) for bit in secret[:96]])
    return packet_binary
def get_acc(true, pred):
    """Return the fraction of positions where bit strings `true` and `pred` agree.

    Input: the ground-truth and the predicted binary strings; output: accuracy
    in [0, 1]. Returns 0.0 for empty input.

    Fix: the original divided by a hard-coded 96 instead of the actual string
    length, silently mis-scaling the accuracy for any secret that is not
    exactly 96 bits long.
    """
    secret_size = len(true)
    if secret_size == 0:
        return 0.0
    count = 0
    for i in range(secret_size):
        if true[i] == pred[i]:
            count += 1
    return count / secret_size
if __name__ == "__main__":
    # Evaluate decoder bit-accuracy over every watermarked PNG in dirPath.
    dirPath = r"E:/dataset/stegastamp_crop"
    modelPath = r'saved_models/decoder.pth'
    file_list = glob.glob(dirPath + '/*.png')
    model = StegaStampDecoder().cuda()
    model.load_state_dict(torch.load(modelPath))
    model.eval()  # inference mode
    bitstring = get_bits()  # ground-truth bits for the default secret
    store = []  # per-image bit accuracies
    with torch.no_grad():
        for file in tqdm(file_list):
            image = Image.open(file).convert("RGB")
            image = image.crop((50, 50, 350, 350))  # keep the central 300x300 region
            image = np.array(ImageOps.fit(image, (400, 400)), dtype=np.float32)
            image /= 255.  # normalize to 0-1 as decode() requires
            result = decode(image, model)
            store.append(get_acc(bitstring, result))
    plt.hist(store)
    plt.show()
    print(np.mean(store))  # overall mean bit accuracy
| 24.705128 | 79 | 0.63259 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 338 | 0.163206 |
4b065798f8f3175be2995f3dc86fae9e7dc987b7 | 1,249 | py | Python | tests/ozpcenter_model_access/test_contact_type.py | emosher/ozp-backend | d31d00bb8a28a8d0c999813f616b398f41516244 | [
"Apache-2.0"
] | 1 | 2018-10-05T17:03:01.000Z | 2018-10-05T17:03:01.000Z | tests/ozpcenter_model_access/test_contact_type.py | emosher/ozp-backend | d31d00bb8a28a8d0c999813f616b398f41516244 | [
"Apache-2.0"
] | 1 | 2017-01-06T19:20:32.000Z | 2017-01-06T19:20:32.000Z | tests/ozpcenter_model_access/test_contact_type.py | emosher/ozp-backend | d31d00bb8a28a8d0c999813f616b398f41516244 | [
"Apache-2.0"
] | 7 | 2016-12-16T15:42:05.000Z | 2020-09-05T01:11:27.000Z | import pytest
from django.test import TestCase
from django.test import override_settings
import ozpcenter.api.contact_type.model_access as model_access
from ozpcenter.models import ContactType
from tests.cases.factories import ContactTypeFactory
@pytest.mark.model_access
@override_settings(ES_ENABLED=False)  # keep Elasticsearch out of these unit tests
class ContactTypeTest(TestCase):
    """Unit tests for ozpcenter.api.contact_type.model_access."""
    @classmethod
    def setUpTestData(cls):
        # Shared fixture: five ContactType rows created once for the class.
        cls.contact_types = ContactTypeFactory.create_batch(5)
    def setUp(self):
        pass
    def test__get_all_contact_types(self):
        """get_all_contact_types returns every created row."""
        results = list(model_access.get_all_contact_types().order_by("id"))
        self.assertListEqual(results, self.contact_types)
    def test__get_contact_type_by_name(self):
        """Lookup by name returns the matching instance."""
        expected = self.contact_types[0]
        result = model_access.get_contact_type_by_name(expected.name)
        self.assertEqual(result, expected)
    def test__get_contact_type_by_name__not_found(self):
        # Second positional argument presumably disables raising — confirm
        # against model_access.get_contact_type_by_name's signature.
        contact_type = model_access.get_contact_type_by_name('Not Existent', False)
        self.assertIsNone(contact_type)
    def test__get_contact_type_by_name__not_found_raises_error(self):
        """By default a missing name raises ContactType.DoesNotExist."""
        with self.assertRaises(ContactType.DoesNotExist):
            model_access.get_contact_type_by_name('Not Existent')
| 30.463415 | 83 | 0.767814 | 936 | 0.7494 | 0 | 0 | 999 | 0.79984 | 0 | 0 | 32 | 0.02562 |
4b070ef3534dcec1b94204596a275dcc71c8d799 | 428 | py | Python | examples/echobot.py | samedamci/telegrask | 8cd0d7663e3a7386784396462f66c176bc6543c5 | [
"0BSD"
] | 4 | 2021-08-19T19:17:17.000Z | 2021-10-12T19:25:59.000Z | examples/echobot.py | samedamci/telegrask | 8cd0d7663e3a7386784396462f66c176bc6543c5 | [
"0BSD"
] | null | null | null | examples/echobot.py | samedamci/telegrask | 8cd0d7663e3a7386784396462f66c176bc6543c5 | [
"0BSD"
] | 1 | 2021-08-31T10:49:34.000Z | 2021-08-31T10:49:34.000Z | #!/usr/bin/python3
"""Simple bot to reply exactly the same what user sent to chat."""
# This program is dedicated to the public domain under the CC0 license.
from telegrask import Telegrask
# Build the bot; replace "BOT_TOKEN" with a real Telegram bot API token.
bot = Telegrask("BOT_TOKEN")
# allow_without_prefix=True presumably lets "echo" trigger without the "/"
# command prefix — confirm against the telegrask docs.
@bot.command("echo", help="repeat user words", allow_without_prefix=True)
def echo(update, context):
    """Reply with exactly the text of the incoming message."""
    update.message.reply_text(update.message.text)
if __name__ == "__main__":
    bot.run(debug=True)
| 25.176471 | 73 | 0.740654 | 0 | 0 | 0 | 0 | 151 | 0.352804 | 0 | 0 | 201 | 0.469626 |
4b07a5e3542e7f446d97c19101d6130c567a06f9 | 2,238 | py | Python | lib/emailsmtp/models.py | hdknr/emailqueue | 05e108562f4fb612440f769973b9a3d02c11afcd | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | lib/emailsmtp/models.py | hdknr/emailqueue | 05e108562f4fb612440f769973b9a3d02c11afcd | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | lib/emailsmtp/models.py | hdknr/emailqueue | 05e108562f4fb612440f769973b9a3d02c11afcd | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | from django.db import models
from django.utils.translation import ugettext_lazy as _
from emailqueue.models import BaseModel
class Domain(BaseModel):
    '''Domain:
    - used for :ref:`postfix.relay_domains`, :ref:`postfix.transport_maps`
    '''
    domain = models.CharField(
        _('Domain'), max_length=50, unique=True, db_index=True,)
    '''`where_field`, also `select_field` for relay_domains '''
    transport = models.CharField(
        _('Transport'), max_length=200)
    '''`where_field` for transport_maps'''
    # When set, this domain is an alias: mail addressed to it is forwarded
    # to the corresponding address at `alias_domain` (see add_alias_address).
    alias_domain = models.ForeignKey(
        'Domain', verbose_name=_('Alias Transport'),
        related_name='alias_domain_set',
        null=True, default=None, blank=True, on_delete=models.SET_NULL)
    class Meta:
        verbose_name = _('Domain')
        verbose_name_plural = _('Domain')
    def __unicode__(self):
        return self.domain
    def create_alias_domain(self, name):
        """Create (or fetch) an alias Domain called `name` that points at
        this domain via ``alias_domain``; its transport is 'error' so mail is
        only delivered through the alias mapping.

        Bug fix: the original passed the misspelled keyword ``doamin`` and
        the non-existent field ``alias``, so this method always raised at
        runtime. It now uses ``domain`` and ``alias_domain`` as declared on
        the model.
        """
        domain, created = Domain.objects.get_or_create(
            domain=name, transport='error',
            alias_domain=self)
        return domain
    def add_alias_address(self, user, alias_user=None):
        """Create or update the Alias forwarding user@<this domain> to
        (alias_user or user)@<alias_domain>.

        Returns the Alias row, or None when no alias_domain is configured.
        """
        if not self.alias_domain:
            return
        src = '{0}@{1}'.format(user, self.domain)
        dst = '{0}@{1}'.format(alias_user or user, self.alias_domain.domain)
        # `alias_set` is the default reverse accessor of Alias.domain.
        alias = self.alias_set.filter(recipient=src).first()
        if alias:
            alias.forward = dst
            alias.save()
        else:
            alias = self.alias_set.create(recipient=src, forward=dst)
        return alias
class Alias(BaseModel):
    '''Alias
    - Used in :ref:`postfix.virtual_alias_maps`
    '''
    # Owning domain; the default related_name provides Domain.alias_set.
    domain = models.ForeignKey(
        Domain,
        null=True, default=None, blank=True, on_delete=models.SET_NULL)
    recipient = models.EmailField(
        _('Recipient Address'), max_length=100, unique=True, db_index=True)
    '''`where_field` for virtual_alias_maps '''
    forward = models.EmailField(
        _('Forward Address'), max_length=100)
    '''`select_field` for virtual_alias_maps '''
    class Meta:
        verbose_name = _('Alias')
        verbose_name_plural = _('Alias')
    def __unicode__(self):
        # "recipient>forward" for readable listing/admin output.
        return u"{0}>{1}".format(self.recipient, self.forward)
| 29.84 | 76 | 0.636282 | 2,106 | 0.941019 | 0 | 0 | 0 | 0 | 0 | 0 | 506 | 0.226095 |
4b096109d1a756991d2981702ec6615bda617d75 | 3,314 | py | Python | emoji-list.unicode.crawler.py | SHITianhao/emoji-dataset | 41812649f518f69472722c56d4aa77faeb9bbe8a | [
"MIT"
] | 2 | 2017-12-19T06:44:59.000Z | 2020-01-17T20:06:53.000Z | emoji-list.unicode.crawler.py | SHITianhao/emoji-dataset | 41812649f518f69472722c56d4aa77faeb9bbe8a | [
"MIT"
] | null | null | null | emoji-list.unicode.crawler.py | SHITianhao/emoji-dataset | 41812649f518f69472722c56d4aa77faeb9bbe8a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This module is used to crawler emoji unicode from http://www.unicode.org/ """
import urllib
import json
import base64
import os
from bs4 import BeautifulSoup
__EMOJI_V4_URL = "http://www.unicode.org/emoji/charts/emoji-list.html"
__EMOJI_V5_URL = "http://www.unicode.org/emoji/charts-beta/emoji-list.html"
__IMG_FOLDER_NAME = "emoji_imgs"
# Load the emoji -> name lookup table (BOM-tolerant decode); Python 2 `file()`.
emoji_file = file("emoji_inverse.json", "r")
emojis = json.loads(emoji_file.read().decode("utf-8-sig"))
print "emoji_inverse.json loaded"
def decode_base64(data):
    """Decode base64, padding being optional.

    :param data: Base64 data as an ASCII byte string
    :returns: The decoded byte string.
    """
    # -len(data) % 4 yields 0..3; the original `4 - len(data) % 4` returned 4
    # for already-padded input and appended four bogus '=' characters.
    missing_padding = -len(data) % 4
    if missing_padding:
        data += b'=' * missing_padding
    # b64decode works on Python 2 and 3; base64.decodestring was removed in
    # Python 3.9.  (Non-alphabet characters are discarded, like decodestring.)
    return base64.b64decode(data)
def unicodes_str_to_emoji(unicodes):
    """Convert a 'U+XXXX U+XXXX'-style code-point string into UTF-8 emoji bytes.

    Python 2 only: relies on the `unicode` builtin and the "unicode_escape"
    codec. Returns None (after printing a warning) for non-string input.
    """
    if isinstance(unicodes, unicode):
        unicodes = unicodes.encode("utf8")
    else:
        print "not a string"
        return
    list_unicode = unicodes.split(' ')
    emoji = ''
    for code in list_unicode:
        code = code[2:]  # strip the leading "U+"
        pending_size = 8 - len(code)
        for _ in range(pending_size):  # left-pad to 8 hex digits
            code = '0' + code
        code = '\U' + code  # build a literal \UXXXXXXXX escape sequence
        emoji += code
    # "unicode_escape" interprets the \U sequences, producing the characters.
    return unicode(emoji, "unicode_escape").encode("utf8")
def crawler_emojis(version):
print "get version: " + version
# create folder
dir_path = __IMG_FOLDER_NAME + '_' + version
if not os.path.exists(dir_path):
os.makedirs(dir_path)
print "folder created"
URL = ''
if version == 'V4':
URL = __EMOJI_V4_URL
elif version == 'V5':
URL = __EMOJI_V5_URL
__PAGE = urllib.urlopen(__EMOJI_V4_URL)
__HTML = __PAGE.read()
__PAGE.close()
__SOUP = BeautifulSoup(__HTML, 'html.parser')
print "Get Page"
_code_list = []
_img_list = []
_name_list = []
for td in __SOUP.find_all("td"):
_class_name = td.get("class")[0]
if _class_name == "code":
_code_list.append(td.a.get_text())
elif _class_name == "andr":
_img_list.append(td.a.img.get("src"))
elif _class_name == "name":
_name_list.append(td.get_text())
_json_list = []
for i in range(len(_code_list)):
# encode img
img_base64 = _img_list[i]
img_data = decode_base64(img_base64[21:])
code = _code_list[i]
emoji = unicodes_str_to_emoji(code)
name_to_save = code + ".png"
# save img to disk
with open(dir_path + "/" + name_to_save, "wb") as f:
f.write(img_data)
f.close()
# write data in json form
if emoji.decode('utf-8') in emojis:
name = emojis[emoji.decode('utf-8')]
else:
name = ''
data = {
"unicode": code,
"name": name,
"description": _name_list[i].encode('utf-8'),
"img": name_to_save,
"emoji": emoji
}
_json_list.append(data)
data_file_name = version + '_data.json'
with open(data_file_name, 'w') as outfile:
json.dump(_json_list, outfile, indent=4, sort_keys=True, ensure_ascii=False)
print "Done version " + version + "\n"
# Crawl both published chart versions back-to-back.
crawler_emojis('V4')
crawler_emojis('V5')
| 28.568966 | 84 | 0.60169 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 796 | 0.240193 |
4b0bae7ae91cfcfff2eabb361271fc8c258445e7 | 1,628 | py | Python | venv/Lib/site-packages/traits/tests/test_constant.py | richung99/digitizePlots | 6b408c820660a415a289726e3223e8f558d3e18b | [
"MIT"
] | 1 | 2022-01-18T17:56:51.000Z | 2022-01-18T17:56:51.000Z | venv/Lib/site-packages/traits/tests/test_constant.py | richung99/digitizePlots | 6b408c820660a415a289726e3223e8f558d3e18b | [
"MIT"
] | null | null | null | venv/Lib/site-packages/traits/tests/test_constant.py | richung99/digitizePlots | 6b408c820660a415a289726e3223e8f558d3e18b | [
"MIT"
] | null | null | null | # (C) Copyright 2005-2021 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
import unittest
from traits.api import Constant, HasTraits, TraitError
class TestConstantTrait(unittest.TestCase):
    """Tests for the `Constant` trait type."""
    def test_initial_value(self):
        """A Constant trait reports its declared value."""
        class TestClass(HasTraits):
            c_atr = Constant(5)
        self.assertEqual(TestClass().c_atr, 5)
    def test_mutable_initial_value(self):
        """Mutable containers are accepted as Constant values."""
        class TestClass(HasTraits):
            c_atr_1 = Constant([1, 2, 3, 4, 5])
            c_atr_2 = Constant({"a": 1, "b": 2})
        obj = TestClass()
        self.assertEqual(obj.c_atr_1, [1, 2, 3, 4, 5])
        self.assertEqual(obj.c_atr_2, {"a": 1, "b": 2})
    def test_assign_fails(self):
        """Assigning to or deleting a Constant raises TraitError."""
        class TestClass(HasTraits):
            c_atr = Constant(5)
        with self.assertRaises(TraitError):
            TestClass(c_atr=5)
        with self.assertRaises(TraitError):
            del TestClass().c_atr
    def test_mutate_succeeds(self):
        """In-place mutation of a mutable Constant value is not prevented."""
        class TestClass(HasTraits):
            c_atr_1 = Constant([1, 2, 3, 4, 5])
            c_atr_2 = Constant({"a": 1, "b": 2})
        obj = TestClass()
        obj.c_atr_1.append(6)
        obj.c_atr_2["c"] = 3
        self.assertEqual(obj.c_atr_1, [1, 2, 3, 4, 5, 6])
        self.assertEqual(obj.c_atr_2, {"a": 1, "b": 2, "c": 3})
| 29.6 | 71 | 0.625307 | 1,144 | 0.702703 | 0 | 0 | 0 | 0 | 0 | 0 | 428 | 0.262899 |
4b0c74252519e1d0763eeba5100d8c404e0ec79d | 5,072 | py | Python | midap_simulator/packet_manager.py | cap-lab/MidapSim | 4f92a9f9413c29d7e1f37e863cce90ebdde8b420 | [
"MIT"
] | 2 | 2021-03-28T16:19:06.000Z | 2022-02-26T08:58:33.000Z | midap_simulator/packet_manager.py | cap-lab/MidapSim | 4f92a9f9413c29d7e1f37e863cce90ebdde8b420 | [
"MIT"
] | null | null | null | midap_simulator/packet_manager.py | cap-lab/MidapSim | 4f92a9f9413c29d7e1f37e863cce90ebdde8b420 | [
"MIT"
] | 1 | 2021-02-22T08:44:20.000Z | 2021-02-22T08:44:20.000Z | import mmap
import numpy as np
from time import sleep
import os
class PacketManager(object):
    """Exchanges fixed-size packets with a peer process over two mmap'd
    shared-memory ring buffers (one per direction) resolved under /dev/shm.

    The byte layout of packets and of the ring-buffer header mirrors the C
    structs quoted in the comments below.
    """
    buf_size = 0x1000   # ring capacity; modulus for the start/end indices
    packet_size = 2072  # bytes per packet slot; must match data_type below
    # C struct mirrored by `data_type`:
    #typedef struct _Packet{
    #    PacketType type;
    #    uint32_t size;
    #    uint64_t cycle;
    #    uint32_t address;
    #    uint8_t data[8];
    #    uint32_t flags;
    #} Packet;
    data_type = np.dtype([('type', 'u4'), ('size', 'u4'), ('cycle', 'u8'), ('address', 'u4'), ('data', 'f4', (512)), ('flags', 'u4')])
    # C struct mirrored by `data_info_type` (the 16-byte ring header):
    #typedef struct {
    #    volatile int start; /* index of oldest element */
    #    volatile int end; /* index at which to write new element */
    #    int capacity;
    #    int size;
    #    Packet elems[PKT_BUFFER_SIZE+1]; /* vector of elements */
    #} PacketBuffer;
    data_info_type = np.dtype([('start', 'u4'), ('end', 'u4'), ('capacity', 'u4'), ('size', 'u4')])
    def __init__(self, path):
        """Map the two shared-memory buffers named in the info file `path`.

        The info file holds three lines: a name, then the send-buffer and
        receive-buffer shm names (presumably "instance->board" and
        "board->instance" — confirm against the peer's naming).
        """
        self._infoPath = path
        self._lastCycle = 0
        self._pType = self.enum('read', 'write', 'elapsed', 'terminated')  # packet types
        self._pFlag = self.enum('none', 'flush')  # packet flag values
        f = open(path, 'r')
        name = f.readline()
        ib_name = f.readline()
        bi_name = f.readline()
        f.close()
        ibFile = open('/dev/shm' + ib_name.rstrip('\n'), 'r+')
        self._sendBuffer = mmap.mmap(ibFile.fileno(), 0, mmap.PROT_READ | mmap.PROT_WRITE)
        ibFile.close()
        biFile = open('/dev/shm' + bi_name.rstrip('\n'), 'r+')
        self._receiveBuffer = mmap.mmap(biFile.fileno(), 0, mmap.PROT_READ | mmap.PROT_WRITE)
        biFile.close()
        # Check if the connection is established.
        self.writeRequest(0x0, 4, 0, 0)
    def enum(self, *sequential, **named):
        """Build an ad-hoc enum type: names map to 0..n-1, plus `named` overrides."""
        enums = dict(zip(sequential, range(len(sequential))), **named)
        return type('Enum', (), enums)
    def isEmpty(self, buffer):
        # The ring is empty when the read and write indices coincide.
        start, end, _, _ = self.readBufInfo(buffer)
        return start == end
    def isFull(self, buffer):
        # One slot is sacrificed so "full" is distinguishable from "empty".
        start, end, _, _ = self.readBufInfo(buffer)
        return (end + 1) % self.buf_size == start;
    def readBufInfo(self, buffer):
        """Return (start, end, capacity, size) read from the 16-byte ring header."""
        buffer.seek(0)
        data_info = np.array(np.frombuffer(buffer.read(16), dtype=self.data_info_type), dtype=self.data_info_type)
        return data_info['start'], data_info['end'], data_info['capacity'], data_info['size']
    def readPacket(self):
        """Block until the receive ring has a packet, then pop and return it.

        Blocking is a busy-wait with a tiny sleep between header polls.
        """
        buffer = self._receiveBuffer
        while self.isEmpty(buffer) == True:
            sleep(0.000000001)
        start, end, capacity, size = self.readBufInfo(self._receiveBuffer)
        buffer.seek(16 + int(start) * self.packet_size)  # skip header, index into slots
        data = np.array(np.frombuffer(buffer.read(self.packet_size), dtype=self.data_type), dtype=self.data_type)
        # Increase the read index (start)
        start = (start + 1) % self.buf_size
        buffer.seek(0)
        buffer.write(start.tobytes())
        return data
    def writePacket(self, packet):
        """Block until the send ring has room, then append `packet`."""
        buffer = self._sendBuffer
        while self.isFull(buffer) == True:
            sleep(0.000000001)
        start, end, capacity, size = self.readBufInfo(buffer)
        data = np.array(packet, dtype=self.data_type)
        buffer.seek(16 + int(end) * self.packet_size)
        buffer.write(data.tobytes())
        # Increase the write index (end)
        end = (end + 1) % self.buf_size
        buffer.seek(4)  # 'end' is the second u4 of the header
        buffer.write(end.tobytes())
        buffer.flush()
    def readRequest(self, addr, size, cycle, flush = False):
        """Send a read request for `size` elements at address `addr` (scaled
        by 4 into a byte address) and block for the reply.

        Returns (data resized to `size`, reply cycle).
        """
        delta_cycle = 0
        if cycle > self._lastCycle:
            delta_cycle = cycle - self._lastCycle
        # delta_cycle is left over from the commented-out variant below; unused.
        #packet = np.array((self._pType.read, size * 4, delta_cycle, addr * 4, 0, 0), dtype=self.data_type)
        packet = np.array((self._pType.read, size, cycle, addr * 4, 0, 0), dtype=self.data_type)
        if flush == True:
            packet['flags'] = self._pFlag.flush
        self.writePacket(packet)
        packet = self.readPacket()
        data = packet['data']
        data = np.resize(data, int(size))
        self._lastCycle = cycle
        return data, packet['cycle']
    def writeRequest(self, addr, size, data, cycle):
        """Send a write request; `data` is truncated/tiled to 512 floats by
        np.resize to fill the packet's fixed payload."""
        delta_cycle = 0
        if cycle > self._lastCycle:
            delta_cycle = cycle - self._lastCycle
        # delta_cycle is left over from the commented-out variant below; unused.
        #packet = np.array((self._pType.write, size * 4, delta_cycle, addr * 4, np.resize(data, 512), 0), dtype=self.data_type)
        packet = np.array((self._pType.write, size, cycle, addr * 4, np.resize(data, 512), 0), dtype=self.data_type)
        self.writePacket(packet)
        self._lastCycle = cycle
    def elapsedRequest(self, cycle):
        """Notify the peer of the current cycle, but only when more than 100
        cycles have passed since the last packet (throttles traffic)."""
        delta_cycle = 0
        if cycle > self._lastCycle + 100:
            delta_cycle = cycle - self._lastCycle
        if delta_cycle > 0:
            packet = np.array((self._pType.elapsed, 0, int(cycle), 0, 0, 0), dtype=self.data_type)
            self.writePacket(packet)
            self._lastCycle = cycle
    def terminatedRequest(self):
        """Tell the peer that the simulation has finished."""
        packet = np.array((self._pType.terminated, 0, 0, 0, 0, 0), dtype=self.data_type)
        self.writePacket(packet)
| 33.813333 | 134 | 0.589708 | 5,003 | 0.986396 | 0 | 0 | 0 | 0 | 0 | 0 | 1,001 | 0.197358 |
4b0d7a34a5dad916ea34157afa0ac2b56a26899d | 119 | py | Python | optimizers/__init__.py | Leo-xxx/NeuronBlocks | 4ddbdc625ccec15337df3cbf85e73ed25a117989 | [
"MIT"
] | 1,257 | 2019-05-06T21:25:16.000Z | 2022-03-19T11:06:49.000Z | optimizers/__init__.py | heavenAsk/NeuronBlocks | 9b08bb8ac7ceca874c8f2541d610bc8d3278fb22 | [
"MIT"
] | 37 | 2019-05-07T00:16:13.000Z | 2021-12-31T11:55:44.000Z | optimizers/__init__.py | heavenAsk/NeuronBlocks | 9b08bb8ac7ceca874c8f2541d610bc8d3278fb22 | [
"MIT"
] | 186 | 2019-05-07T00:36:40.000Z | 2022-02-28T20:47:19.000Z | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
from torch.optim import * | 39.666667 | 59 | 0.781513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 92 | 0.773109 |
4b0eec937bcf7b4132e9bab483c930a0a86d89bc | 3,824 | py | Python | amurlevel_model/model/train_test_split.py | RaevskyDN/aij2020-amur-noflood-public | d11349b1f8cc79c18bb078392731eac32b3c56ff | [
"Apache-2.0"
] | 7 | 2021-02-17T18:55:13.000Z | 2021-07-30T13:56:19.000Z | amurlevel_model/model/train_test_split.py | RaevskyDN/aij2020-amur-noflood-public | d11349b1f8cc79c18bb078392731eac32b3c56ff | [
"Apache-2.0"
] | null | null | null | amurlevel_model/model/train_test_split.py | RaevskyDN/aij2020-amur-noflood-public | d11349b1f8cc79c18bb078392731eac32b3c56ff | [
"Apache-2.0"
] | 1 | 2022-01-23T15:11:43.000Z | 2022-01-23T15:11:43.000Z | # -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
from datetime import date
from typing import Union,Tuple,Optional,List
from ..config_features import CATEGORICAL_FEATURES,NUMERICAL_FEATURES
from ..config import DAYS_FORECAST,ALL_STATIONS
from ..utils.normalizer import get_normalizer_stats
def train_test_split(amur_df: pd.DataFrame,
start_test_date: Union[date,str],
end_test_date: Union[date,str],
fname: Optional[str]=None,
numerical_features: Optional[List[str]]=None,
categorical_features: Optional[List[str]]=None) -> Tuple[np.array,np.array,np.array,np.array]:
'''
Деление на трейн, тест для обучения.
Шаг с которым идем по трейну - 1 день, шак с которым идем по тесту - 10 дней
Итоговый шейп [n,DAYS_FORECAST,n_features] - n - объем выборки,
DAYS_FORECAST - количество дней предсказания (10),
n_features - количество признаков
:param amur_df: pd.DataFrame
:param start_test_date: date,str - начало по времени тестовой выборки
:param end_test_date: date,str - конец по времени тестовой выборки
:param fname: str, путь до файла json cо статистикой mean,std для каждого поля
:param numerical_features: List[str] - список численных признаков
:param categorical_features: List[str] - список категориальных признаков
:return: tuple:
X_train - обучающая выборка
y_train - метки для обучающей выборки
X_test - тестовая выборка
y_test - метки для обучающей выборки
'''
if numerical_features is None:
numerical_features = NUMERICAL_FEATURES
if categorical_features is None:
categorical_features = CATEGORICAL_FEATURES
targets = ['sealevel_max_' + identifier for identifier in ALL_STATIONS]
train = amur_df[amur_df['date'] < start_test_date].copy()
test = amur_df[(amur_df['date'] >= start_test_date) &
(amur_df['date'] < end_test_date)].copy()
stats = get_normalizer_stats(fname)
for col in numerical_features:
_mean = stats[col]['mean']
_std = stats[col]['std']
train[col] = (train[col] - _mean) / _std
test[col] = (test[col] - _mean) / _std
train.sort_values('date', inplace=True)
train_x_array = []
train_y_array = []
step = 0
while True:
if step + DAYS_FORECAST + 1 >= len(train):
break
if train.iloc[step:step + DAYS_FORECAST][targets].count().min() < DAYS_FORECAST:
step += 1
continue
train_x_array.append(train.iloc[step:step + DAYS_FORECAST][numerical_features + categorical_features].values)
train_y_array.append(train.iloc[step:step + DAYS_FORECAST][targets].values)
step += 1
X_train = np.transpose(np.dstack(train_x_array), (2, 0, 1))
y_train = np.transpose(np.dstack(train_y_array), (2, 0, 1))
step = 0
test.sort_values('date', inplace=True)
test_x_array = []
test_y_array = []
while True:
if step >= len(test):
break
if test.iloc[step:step + DAYS_FORECAST][targets].count().min() < DAYS_FORECAST:
step += DAYS_FORECAST
continue
test_x_array.append(test.iloc[step:step + DAYS_FORECAST][numerical_features + categorical_features].values)
test_y_array.append(test.iloc[step:step + DAYS_FORECAST][targets].values)
if step + DAYS_FORECAST*2+1 >= len(test):
break
step += DAYS_FORECAST
X_test = np.transpose(np.dstack(test_x_array), (2, 0, 1))
y_test = np.transpose(np.dstack(test_y_array), (2, 0, 1))
return X_train, y_train, X_test, y_test | 41.565217 | 117 | 0.636245 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,469 | 0.349512 |
4b11b0281ea28ca55d21c7ab676dce9fefb150be | 35 | py | Python | bin/preprocessor/__init__.py | ian0549/AI-Audio-Task | 6918fc4e7fd337c5649c47925ad5b8d999fda0e1 | [
"MIT"
] | null | null | null | bin/preprocessor/__init__.py | ian0549/AI-Audio-Task | 6918fc4e7fd337c5649c47925ad5b8d999fda0e1 | [
"MIT"
] | null | null | null | bin/preprocessor/__init__.py | ian0549/AI-Audio-Task | 6918fc4e7fd337c5649c47925ad5b8d999fda0e1 | [
"MIT"
] | null | null | null | from .Preprocessor import Pipeline
| 17.5 | 34 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4b136b651e1325beb870ea9f5a79512ec242273e | 80,229 | py | Python | common/ui.py | Regnareb/StreamManager | 8b95e785d41c78f03725077f5dce2a5c15e0354f | [
"MIT"
] | null | null | null | common/ui.py | Regnareb/StreamManager | 8b95e785d41c78f03725077f5dce2a5c15e0354f | [
"MIT"
] | null | null | null | common/ui.py | Regnareb/StreamManager | 8b95e785d41c78f03725077f5dce2a5c15e0354f | [
"MIT"
] | null | null | null | import os
import sys
import copy
import ctypes
import socket
import logging
import threading
import functools
import webbrowser
logger = logging.getLogger(__name__)
import keyboard
from PySide2 import QtCore, QtWidgets, QtGui, QtWebEngineWidgets
# TODO
# Be able to import a text file in the description/title as variables (to have counters and currentsong for example)
# Add the %CATEGORY% variable and other monitored files as context menu entries
# Pouvoir ajouter un commandbot avec des commandes customs (!game !currentsong)
# Add About and Help menu entries
# Automatically switch scenes in OBS depending of the game played
# Add an XML/EDL file and add each marker created for import into premiere/resolve/FCP
# Change color tray icon to green if update channel with new process or red + toast message if error
# Add trayicons for dropped frames and stream/record states
# Do a notification if the user has not used a streaming process for X minutes if any service is online (to prevent streaming unnoticed)
# Make a customizable stream deck that automatically changes its keys depending on the program in use https://interactjs.io/
# Being able to put it in portrait without changing icons layout
# Add Multi Actions with pause timers
# Create an independant server that scan the foreground process and send it to the receiver, this way multi computer streaming is possible
# websocket plugin ( https://github.com/Elektordi/obs-websocket-py ) Show Scene selector, MIC and DEFAULT volume, RECORD and STREAMING status and STATS
import common.manager
import common.remote
import common.tools
import common.systray
class QLoggerHandler(common.tools.HtmlStreamHandler):
    """Logging handler that forwards each formatted record through a Qt signal.

    The emitter object is owned by the caller (see LogPanel); this handler
    only formats records and re-broadcasts them with the old-style signal
    signature ``logMsg(QString)``.
    """
    def __init__(self, signal):
        """Store the Qt object whose ``emit`` broadcasts formatted log lines."""
        super().__init__()
        self.signal = signal
    def emit(self, record):
        """Format *record* and publish the resulting HTML string via Qt."""
        self.signal.emit(QtCore.SIGNAL("logMsg(QString)"), self.format(record))
class LogPanel(QtWidgets.QDockWidget):
    """Dockable panel showing application logs with a selectable log level.

    Installs a QLoggerHandler on the root logger so every log record is
    rendered (as HTML) into the panel's text browser.
    """
    # Emitted with the combo-box index when the user picks a new log level.
    changed_loglevel = QtCore.Signal(str)
    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self.setWindowTitle('Logs')
        self.setObjectName('docklogs')
        # Ordered list shown in the level combo box; index == verbosity rank.
        self.levels = ['Debug', 'Info', 'Warning', 'Error', 'Critical']
        self.interface = {}
        self.interface['main'] = QtWidgets.QWidget()
        self.interface['layoutv'] = QtWidgets.QVBoxLayout()
        self.interface['layouth'] = QtWidgets.QHBoxLayout()
        self.interface['label'] = QtWidgets.QLabel('Logs Level:')
        self.interface['levels'] = QtWidgets.QComboBox()
        self.interface['levels'].insertItems(0, self.levels)
        self.interface['levels'].currentIndexChanged.connect(self.changed_loglevel.emit)
        self.interface['textedit'] = QtWidgets.QTextBrowser()
        self.interface['textedit'].setOpenLinks(False)
        self.interface['clear'] = QtWidgets.QPushButton('Clear')
        self.interface['clear'].clicked.connect(self.interface['textedit'].clear)
        self.interface['layouth'].addStretch()
        self.interface['layouth'].addWidget(self.interface['label'])
        self.interface['layouth'].addWidget(self.interface['levels'])
        self.interface['layouth'].addStretch()
        self.interface['layouth'].addWidget(self.interface['clear'])
        self.interface['layoutv'].addLayout(self.interface['layouth'])
        self.interface['layoutv'].addWidget(self.interface['textedit'])
        self.interface['main'].setLayout(self.interface['layoutv'])
        self.setWidget(self.interface['main'])
        # Use old syntax signals as you can't have multiple inheritance with QObject
        self.emitter = QtCore.QObject()
        self.connect(self.emitter, QtCore.SIGNAL("logMsg(QString)"), self.interface['textedit'].append)
        self.handler = QLoggerHandler(self.emitter)
        # The span's title attribute exposes the source line number on hover.
        formatter = logging.Formatter('<span title="line %(lineno)d">%(levelname)s %(name)s.%(funcName)s() - %(message)s</span>')
        self.handler.setFormatter(formatter)
        # Attach to the root logger so every module's records reach the panel.
        logging.getLogger().addHandler(self.handler)
class DialogAddProcess(QtWidgets.QDialog):
    """Small modal dialog asking for a game/process name.

    The line edit is auto-completed from the keys of *database*; after
    exec_() callers read ``self.linedit.text()`` — empty means cancelled.
    """
    def __init__(self, database, parent=None):
        super().__init__(parent)
        self.completer = QtWidgets.QCompleter(list(database.keys()))
        self.completer.setCaseSensitivity(QtCore.Qt.CaseInsensitive)
        self.linedit = QtWidgets.QLineEdit()
        self.linedit.setMinimumWidth(200)
        self.linedit.setCompleter(self.completer)
        self.buttons = QtWidgets.QDialogButtonBox(QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel)
        self.layout = QtWidgets.QVBoxLayout()
        self.layout.addWidget(self.linedit)
        self.layout.addWidget(self.buttons)
        self.setLayout(self.layout)
        self.setWindowTitle('Add Game')
        self.buttons.accepted.connect(self.accept)
        self.buttons.rejected.connect(self.cancel)
    def cancel(self):
        """Clear the typed name (the cancel marker) and close the dialog."""
        self.linedit.setText('')
        self.close()
    def closeEvent(self, event):
        # NOTE(review): cancel() calls close(), which re-enters closeEvent;
        # Qt appears to tolerate this, but confirm there is no double-close issue.
        self.cancel()
        super().closeEvent(event)
@common.tools.decorate_all_methods(common.tools.catch_exception(logger=logger))
class StreamManager_UI(common.systray.Window):
    """Main application window.

    Hosts three dock panels (games settings, web status page, logs), a system
    tray icon (via common.systray.Window), the stream manager worker thread
    and the local web remote. Every method is wrapped by catch_exception so
    UI callbacks log instead of raising.
    """
    def __init__(self):
        super().__init__()
        self.setWindowTitle('Stream Manager')
        self.setIcon(QtGui.QIcon('icon.png'))
        self.load_stylesheet()
        self.setCentralWidget(None)
        self.log_panel = LogPanel()
        self.log_panel.changed_loglevel.connect(self.set_loglevel)
        # Worker thread handling the stream services and process watching.
        self.manager = ManagerStreamThread()
        self.manager.create_services()
        self.manager.createdservices.connect(self.updated)
        self.manager.validate.connect(self.update_invalidcategory)
        self.manager.updated.connect(self.updated)
        # Local web server exposing the remote-control page.
        self.webremote = WebRemote(self.manager.config['base']['autostart'])
        self.webremote.startedcheck.connect(self.start_check)
        self.webremote.stoppedcheck.connect(self.stop_check)
        self.webremote.start()
        self.preferences = Preferences(self.manager, self)
        self.preferences.updated.connect(self.preferences_updated)
        self.preferences.finished.connect(self.set_shortcuts)
        self.create_gamelayout()
        self.create_statuslayout()
        self.populate_appdata()
        self.load_generalsettings()
        self.create_menu()
        self.setTabPosition(QtCore.Qt.AllDockWidgetAreas, QtWidgets.QTabWidget.North)
        self.setDockOptions(QtWidgets.QMainWindow.AllowNestedDocks | QtWidgets.QMainWindow.AllowTabbedDocks)
        self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.log_panel)
        self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.panel_status['dock'])
        self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.gameslayout['dock'])
        self.panel_status['dock'].raise_()
        self.setAcceptDrops(True)
        self.set_shortcuts(init=True)
        self.read_qsettings()
        # Optionally start minimised to the system tray.
        if self.manager.config['base']['starttray']:
            self.hide()
        else:
            self.show()
    def set_dockable(self, state=None):
        """Toggle whether the dock panels show a title bar (i.e. can be moved)."""
        # NOTE(review): `state==None` — prefer `state is None`.
        if state==None:
            state = self.dockable.isChecked()
        for i in [self.log_panel, self.gameslayout['dock'], self.panel_status['dock']]:
            # A dummy empty widget as title bar hides the drag handle entirely.
            dummy = None if state else QtWidgets.QWidget()
            i.setTitleBarWidget(dummy)
        self.dockable.setChecked(state)
    def read_qsettings(self):
        """Restore window geometry/state and log level from QSettings."""
        self.settings = QtCore.QSettings('regnareb', 'Stream Manager')
        if self.settings.value('initialised_once'):
            self.restoreGeometry(self.settings.value('geometry'))
            self.restoreState(self.settings.value('windowState'))
            self.log_panel.interface['levels'].setCurrentIndex(self.log_panel.interface['levels'].findText(self.settings.value('logslevel')))
            self.set_loglevel(self.settings.value('logslevel'))
            logger.info('Loaded settings from last session.')
            # NOTE(review): QSettings may return the stored value as a string;
            # bool('') is False and any non-empty string is True — confirm the
            # 'dockable' key is always stored as '' or a truthy string (see quit()).
            self.set_dockable(bool(self.settings.value('dockable')))
        else:
            self.first_launch()
    def first_launch(self):
        """One-time setup: default dock layout and open the preferences dialog."""
        logger.info('First launch.')
        self.set_loglevel('Warning')
        self.tabifyDockWidget(self.panel_status['dock'], self.gameslayout['dock'])
        self.tabifyDockWidget(self.gameslayout['dock'], self.log_panel)
        self.log_panel.hide()
        self.preferences.open()
        self.preferences.tabs.setCurrentIndex(1)
        self.preferences.tabs.tabBar().hide()
        self.set_dockable(False)
        self.settings.setValue('initialised_once', 1)
    def closeEvent(self, event):
        """Minimise to tray instead of quitting while the tray icon is visible."""
        if self.trayIcon.isVisible():
            if not self.settings.value('showed_quitmessage'):
                QtWidgets.QMessageBox.information(self, "Minimise to System Tray", "The program will keep running in the system tray. To terminate the program, choose <b>Quit</b> in the context menu of the system tray icon.")
                self.settings.setValue("showed_quitmessage", True)
            # Unload the web page so the hidden window stops consuming resources.
            self.panel_status['webpage'].load(QtCore.QUrl(""))
            super().closeEvent(event)
        else:
            self.quit()
    def restore(self):
        """Re-show the window from the tray and reload the status web page."""
        if self.isHidden():
            self.panel_status['webpage'].load(QtCore.QUrl("http://localhost:{}/".format(self.webremote.port)))
        super().restore()
    def quit(self):
        """Persist settings, stop worker threads and exit the application."""
        self.manager.quit()
        self.webremote.quit()
        self.webremote.terminate()
        self.settings.setValue("geometry", self.saveGeometry())
        self.settings.setValue("windowState", self.saveState())
        # Stored as '' (falsy) or True so read_qsettings can bool() it back.
        self.settings.setValue("dockable", self.dockable.isChecked() or '')
        self.settings.setValue("logslevel", self.log_panel.interface['levels'].currentText())
        if not self.manager.save_config():
            # Saving failed: give the user a chance to salvage the raw config.
            msgBox = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Critical, "Can't Save Preferences", "Couldn't save the preferences, you can copy its content in the \"Show Detail\" to try and salvage them, or send it to the developer for debug purposes.")
            msgBox.setDetailedText(str(self.manager.config))
            msgBox.setStandardButtons(QtWidgets.QMessageBox.Close | QtWidgets.QMessageBox.Cancel)
            msgBox.setDefaultButton(QtWidgets.QMessageBox.Close)
            ret = msgBox.exec_()
            if ret==QtWidgets.QMessageBox.Cancel:
                return
        super().quit()
    def preferences_updated(self):
        """React to validated preference changes from the dialog."""
        self.set_shortcuts()
        self.manager.process = ''
    def load_stylesheet(self):
        """Apply the bundled CSS theme to the whole window."""
        path = os.path.join(os.path.dirname(__file__), '..', 'data', 'theme', 'qtstylesheet.css')
        with open(path) as f:
            stylesheet = f.read()
        self.setStyleSheet(stylesheet)
    def dropEvent(self, event):
        """Load any file dropped onto the window as a credentials file."""
        for url in event.mimeData().urls():
            self.manager.load_credentials(url.toLocalFile())
    def dragEnterEvent(self, event):
        event.acceptProposedAction()
    def start_check(self):
        """Start the manager thread (triggered by the web remote)."""
        self.manager.start()
    def stop_check(self):
        """Stop the manager thread (triggered by the web remote)."""
        self.manager.quit()
    def updated(self, infos=None):
        # `infos` is provided by manager signals but only a refresh is needed.
        self.reload()
    def reload(self):
        """Refresh the embedded status web page."""
        self.panel_status['webpage'].reload()
    def set_loglevel(self, level=''):
        """Set the application log level, falling back to the combo selection."""
        block_signals(self.log_panel.interface.values(), True)
        if level not in self.log_panel.levels:
            level = self.log_panel.interface['levels'].currentText()
        self.manager.set_loglevel(level)
        self.log_panel.interface['levels'].setCurrentIndex(self.log_panel.interface['levels'].findText(level))
        block_signals(self.log_panel.interface.values(), False)
    def mouseDoubleClickEvent(self, *args):
        """Toggle a frameless always-on-top mode (also bound to F11)."""
        # Position/geometry must be captured and restored because changing
        # window flags re-creates the native window.
        pos = self.pos()
        geo = self.geometry()
        if self.menuBar().isVisible():
            self.setWindowFlags(self.windowFlags() | QtCore.Qt.WindowStaysOnTopHint | QtCore.Qt.FramelessWindowHint)
        else:
            self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowStaysOnTopHint & ~QtCore.Qt.FramelessWindowHint)
        self.show()
        self.move(pos)
        self.setGeometry(geo)
        self.menuBar().setVisible(not self.menuBar().isVisible())
    def create_menu(self):
        """Build the File/View/Help menu bar."""
        def clipboard():
            # Copy the web remote URL for use on another device.
            url = "http://localhost:{}/".format(self.webremote.port)
            cb = QtWidgets.QApplication.clipboard()
            cb.setText(url, mode=cb.Clipboard)
        actionfile = self.menuBar().addMenu('File')
        preferences = QtWidgets.QAction('&Preferences', self, triggered=self.preferences.open)
        preferences.setMenuRole(QtWidgets.QAction.PreferencesRole)
        actionfile.addAction(preferences)
        actionfile.addAction(QtWidgets.QAction('&Copy Remote URL', self, triggered=clipboard))
        actionfile.addSeparator()
        actionfile.addAction(QtWidgets.QAction('&Import Preferences', self, triggered=self.import_settings))
        actionfile.addAction(QtWidgets.QAction('&Export Preferences', self, triggered=self.export_settings))
        actionfile.addAction(QtWidgets.QAction('&Import Game Database', self, triggered=self.import_database))
        actionfile.addAction(QtWidgets.QAction('&Export Game Database', self, triggered=self.export_database))
        actionfile.addSeparator()
        actionfile.addAction(QtWidgets.QAction('&Quit', self, triggered=self.quit))
        actionview = self.menuBar().addMenu('View')
        self.dockable = QtWidgets.QAction('Dockable', self, triggered=self.set_dockable)
        self.dockable.setCheckable(True)
        actionview.addSeparator()
        actionview.addAction(self.panel_status['dock'].toggleViewAction())
        actionview.addAction(self.gameslayout['dock'].toggleViewAction())
        actionview.addAction(self.log_panel.toggleViewAction())
        actionview.addSeparator()
        actionview.addAction(self.dockable)
        actionhelp = self.menuBar().addMenu('Help')
        actionhelp.addAction(QtWidgets.QAction('&Homepage', self, triggered=functools.partial(webbrowser.open, 'https://github.com/Regnareb/StreamManager')))
    def create_gamelayout(self):
        """Build the Games dock: process table (left) and per-game form (right).

        All widgets are kept in self.gameslayout keyed by name; the right-hand
        form switches between a per-process page and a "general defaults" page
        via a QStackedWidget.
        """
        self.gameslayout = {}
        self.gameslayout['llayout'] = QtWidgets.QVBoxLayout()
        self.gameslayout['table'] = QtWidgets.QTableWidget()
        self.gameslayout['table'].setObjectName('table_games')
        self.gameslayout['table'].currentCellChanged.connect(self.load_appsettings)
        self.gameslayout['table'].itemChanged.connect(self.rename_process)
        self.gameslayout['table'].setEditTriggers(QtWidgets.QTableWidget.DoubleClicked)
        self.gameslayout['table'].setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
        self.gameslayout['table'].setColumnCount(1)
        self.gameslayout['table'].setWordWrap(False)
        self.gameslayout['table'].verticalHeader().setVisible(False)
        self.gameslayout['table'].setMinimumWidth(200)
        header = self.gameslayout['table'].horizontalHeader()
        header.setMinimumHeight(40)
        header.setSectionResizeMode(0, QtWidgets.QHeaderView.Stretch)
        # Clicking the header deselects any game and shows the general settings.
        header.sectionClicked.connect(self.load_generalsettings)
        self.gameslayout['table'].setHorizontalHeaderLabels(['GENERAL'])
        self.gameslayout['add_process'] = QtWidgets.QPushButton('+')
        self.gameslayout['add_process'].setFixedSize(30, 27)
        self.gameslayout['add_process'].clicked.connect(self.add_process)
        self.gameslayout['remove_process'] = QtWidgets.QPushButton('-')
        self.gameslayout['remove_process'].setFixedSize(30, 27)
        self.gameslayout['remove_process'].clicked.connect(self.remove_process)
        self.gameslayout['addremove_layout'] = QtWidgets.QHBoxLayout()
        self.gameslayout['addremove_layout'].addWidget(self.gameslayout['add_process'])
        self.gameslayout['addremove_layout'].addWidget(self.gameslayout['remove_process'])
        self.gameslayout['addremove_layout'].addStretch()
        self.gameslayout['llayout'].addWidget(self.gameslayout['table'])
        self.gameslayout['llayout'].addLayout(self.gameslayout['addremove_layout'])
        self.gameslayout['rlayout'] = QtWidgets.QFormLayout()
        self.gameslayout['rlayout'].setRowWrapPolicy(QtWidgets.QFormLayout.WrapAllRows)
        self.gameslayout['stacked'] = QtWidgets.QStackedWidget()
        self.gameslayout['stacked'].setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed))
        self.gameslayout['stacked_processpath'] = LineEdit({True: QtWidgets.QApplication.style().standardIcon(QtWidgets.QStyle.SP_DirIcon)})
        self.gameslayout['stacked_processpath'].changeButtonState(True)
        self.gameslayout['stacked_processpath'].editingFinished.connect(self.save_appdata)
        self.gameslayout['stacked_processpath'].buttonClicked.connect(self.get_processpath)
        self.gameslayout['stacked_processpath'].setToolTip('Process Name/Path')
        self.gameslayout['stacked_processlayout'] = QtWidgets.QFormLayout()
        self.gameslayout['stacked_processlayout'].setRowWrapPolicy(QtWidgets.QFormLayout.WrapAllRows)
        self.gameslayout['stacked_processlayout'].addRow('Executable name:', self.gameslayout['stacked_processpath'])
        self.gameslayout['stacked_process'] = QtWidgets.QWidget()
        self.gameslayout['stacked_processlayout'].setContentsMargins(0, 0, 0, 0)
        self.gameslayout['stacked_process'].setLayout(self.gameslayout['stacked_processlayout'])
        self.gameslayout['stacked_label'] = QtWidgets.QLabel()
        self.gameslayout['stacked_label'].setText('Applied by default for all games if there is no data\nLocks will force this setting no matter what for all games')
        self.gameslayout['stacked_label'].setAlignment(QtCore.Qt.AlignCenter)
        self.gameslayout['stacked'].addWidget(self.gameslayout['stacked_process'])
        self.gameslayout['stacked'].addWidget(self.gameslayout['stacked_label'])
        self.gameslayout['rlayout'].addRow(self.gameslayout['stacked'])
        self.gameslayout['stacked'].setCurrentWidget(self.gameslayout['stacked_label'])
        elements = ['title', 'tags', 'command', 'description']
        folder = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'data', 'theme', 'images'))
        # Lock/unlock icons for the "forced" toggle on each general setting.
        icons = {False: QtGui.QIcon(folder + "/unlock.png"), True: QtGui.QIcon(folder + "/lock.png")}
        self.gameslayout['category_layout'] = QtWidgets.QHBoxLayout()
        self.gameslayout['category_layout'].setSpacing(0)
        self.gameslayout['category_conflicts'] = QtWidgets.QPushButton('...')
        self.gameslayout['category_conflicts'].setStyleSheet('border: 1px solid rgba(0, 0, 0, 50); padding:4px')
        self.gameslayout['category_conflicts'].setFixedWidth(self.gameslayout['category_conflicts'].sizeHint().height())
        self.gameslayout['category_conflicts'].clicked.connect(self.show_assignations)
        self.gameslayout['category'] = LineEdit(icons)
        self.gameslayout['category'].setToolTip('Category')
        self.gameslayout['category'].editingFinished.connect(functools.partial(self.save_appdata, validate=True))
        self.completer = QtWidgets.QCompleter(list(self.manager.database.keys()))
        self.completer.setCaseSensitivity(QtCore.Qt.CaseInsensitive)
        self.gameslayout['category'].setCompleter(self.completer)
        self.gameslayout['category_layout'].addWidget(self.gameslayout['category_conflicts'])
        self.gameslayout['category_layout'].addWidget(self.gameslayout['category'])
        self.gameslayout['rlayout'].addRow('Category:', self.gameslayout['category_layout'])
        for key in elements:
            self.gameslayout[key] = LineEdit(icons)
            self.gameslayout[key].setMinimumHeight(30)
            self.gameslayout[key].editingFinished.connect(self.save_appdata)
            # Retain layout space when the field is hidden so rows don't jump.
            s = self.gameslayout[key].sizePolicy()
            s.setRetainSizeWhenHidden(True)
            self.gameslayout[key].setSizePolicy(s)
            self.gameslayout[key].setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed))
            self.gameslayout['rlayout'].addRow(key.title() + ':', self.gameslayout[key])
            self.gameslayout[key].setToolTip(key.title())
        self.gameslayout['rlayout'].labelForField(self.gameslayout['description']).setText('Game Description <span style="color:grey;">(!game)</span>:')
        self.gameslayout['rlayout'].labelForField(self.gameslayout['command']).setText('Command to execute:')
        self.gameslayout['container_llayout'] = QtWidgets.QWidget()
        self.gameslayout['container_llayout'].setLayout(self.gameslayout['llayout'])
        self.gameslayout['container_rlayout'] = QtWidgets.QWidget()
        self.gameslayout['container_rlayout'].setLayout(self.gameslayout['rlayout'])
        self.gameslayout['dock'] = QtWidgets.QDockWidget('Games')
        self.gameslayout['dock'].setObjectName('dockgames')
        self.gameslayout['dock_layout'] = QtWidgets.QHBoxLayout()
        self.gameslayout['main'] = QtWidgets.QSplitter()
        self.gameslayout['main'].addWidget(self.gameslayout['container_llayout'])
        self.gameslayout['main'].addWidget(self.gameslayout['container_rlayout'])
        self.gameslayout['main'].setStretchFactor(0, 0)
        self.gameslayout['main'].setStretchFactor(1, 1)
        self.gameslayout['main'].setCollapsible(0, 0)
        self.gameslayout['main'].setCollapsible(1, 0)
        self.gameslayout['main'].addWidget(self.gameslayout['container_rlayout'])
        self.gameslayout['dock'].setWidget(self.gameslayout['main'])
    def create_filedialog(self, action='open'):
        """Show a native open/save dialog and return the chosen path ('' if none)."""
        if action == 'open':
            path, _filters = QtWidgets.QFileDialog.getOpenFileName()
        elif action == 'save':
            path, _filters = QtWidgets.QFileDialog.getSaveFileName()
        return path
    def get_processpath(self, *args):
        """Browse for an executable and fill the process-path field."""
        path = self.create_filedialog()
        if path:
            self.gameslayout['stacked_processpath'].setText(path)
    def add_process(self):
        """Prompt for a new process name and add a row for it."""
        self.nodal = DialogAddProcess(self.manager.database)
        self.nodal.exec_()
        name = self.nodal.linedit.text()
        if name:
            row = self.create_gamerow(name)
            index = self.gameslayout['table'].indexFromItem(row)
            self.gameslayout['table'].setCurrentIndex(index)
            # rename_process() returns None when the user aborted a duplicate.
            if not self.rename_process():
                self.gameslayout['table'].removeRow(index.row())
            self.load_appsettings()
    def rename_process(self, *args):
        """Commit a process rename, resolving duplicates with a confirmation.

        Returns True on success, None when the rename was rejected/reverted.
        """
        current = self.gameslayout['table'].currentItem()
        new = current.text()
        old = current._process
        if not new:
            current.setText(old)
            return None
        if self.manager.config['appdata'].get(new, ''):
            msgBox = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Warning, "That Process Already Exists", 'The process "{}" already exists, are you sure you want to do that?\nIt will replace the old settings with the current ones.'.format(new))
            msgBox.setStandardButtons(QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel)
            logger.warning('The same process is already registered: {}'.format(new))
            ret = msgBox.exec_()
            if ret == QtWidgets.QMessageBox.Ok:
                # Delete the old data and replace with current
                item = [i for i in self.gameslayout['table'].findItems(new, QtCore.Qt.MatchExactly) if i is not current][0]
                index = self.gameslayout['table'].indexFromItem(item)
                self.gameslayout['table'].removeRow(index.row())
                currentindex = self.gameslayout['table'].indexFromItem(current)
                self.gameslayout['table'].setCurrentIndex(currentindex)
            else:
                # Return to the previous name
                current.setText(old)
                return None
        self.manager.rename_process(old, new)
        current._process = new
        self.gameslayout['table'].sortByColumn(0, QtCore.Qt.AscendingOrder)
        return True
    def remove_process(self):
        """Delete the selected process from the config and the table."""
        current = self.gameslayout['table'].currentItem()
        if current:
            self.manager.remove_process(current.text())
            self.gameslayout['table'].removeRow(self.gameslayout['table'].currentRow())
    def import_settings(self):
        """Load a config file picked by the user (no backup of the current one)."""
        path = self.create_filedialog(action='open')
        if path:
            self.manager.load_config(path, backup=False)
    def export_settings(self):
        """Save the current config to a file picked by the user."""
        path = self.create_filedialog(action='save')
        if path:
            self.manager.save_config(path)
    def import_database(self):
        """Load a game database file picked by the user."""
        path = self.create_filedialog(action='open')
        if path:
            self.manager.import_database(path)
    def export_database(self):
        """Save the game database to a file picked by the user."""
        path = self.create_filedialog(action='save')
        if path:
            self.manager.export_database(path)
    def save_appdata(self, validate=False):
        """Persist the form fields into the selected process or general config.

        With no table selection the values (plus the lock-button states) go to
        config['base']; otherwise they go to config['appdata'][process].
        """
        current = self.gameslayout['table'].currentItem()
        cat = self.gameslayout['category'].text()
        title = self.gameslayout['title'].text()
        description = self.gameslayout['description'].text()
        tags = self.gameslayout['tags'].text().split(',')
        command = self.gameslayout['command'].text()
        tags = [i.strip() for i in tags if i]
        data = {'category': cat, 'title': title, 'tags': tags, 'description': description, 'command': command}
        if validate:
            self.manager.config['assignations'] = self.manager.validate_assignations(self.manager.config['assignations'], cat)
        if current and current.text():
            self.manager.config['appdata'][current.text()].update(data)
            self.manager.config['appdata'][current.text()]['path'][sys.platform] = self.gameslayout['stacked_processpath'].text()
            self.update_gamerow(current)
        elif not current:
            for key in data.copy():
                data['forced_' + key] = self.gameslayout[key].button.state
            self.manager.config['base'].update(data)
        self.manager.process = ''  # Reset current process to be able to apply new settings
        logger.debug(data)
    def show_assignations(self):
        """Open the preferences on the assignations tab for the current category."""
        category = self.gameslayout['category'].text()
        self.preferences.open()
        self.preferences.tabs.setCurrentIndex(2)
        self.preferences.tabs.tabBar().hide()
        if category:
            index = self.preferences.tab_assignations.interface['processes'].findText(category)
            self.preferences.tab_assignations.interface['processes'].setCurrentIndex(index)
    def update_invalidcategory(self, category):
        """Color the conflicts button red when *category* fails validation."""
        if self.manager.is_validcategories(category):
            self.gameslayout['category_conflicts'].setStyleSheet('background: rgba(0, 0, 0, 15)')
        elif category == self.gameslayout['category'].text():
            self.gameslayout['category_conflicts'].setStyleSheet('background: rgba(255, 0, 0, 255)')
        current = self.gameslayout['table'].currentItem()
        if current:
            self.update_gamerow(current)
    def update_gamerow(self, row):
        """Tint the table row red when its category is invalid."""
        if row.text():
            category = self.manager.config['appdata'].get(row.text(), {}).get('category', '')
            self.gameslayout['table'].blockSignals(True)
            if self.manager.is_validcategories(category):
                row.setBackground(QtGui.QBrush())
            else:
                row.setBackground(QtGui.QColor(255,0,0))
            self.gameslayout['table'].blockSignals(False)
    def create_gamerow(self, process=''):
        """Insert a table row for *process* and return the created item."""
        self.gameslayout['table'].blockSignals(True)
        self.gameslayout['table'].itemChanged.disconnect(self.rename_process)  # QtBug workaround because the signal itemChanged is not blocked
        row = QtWidgets.QTableWidgetItem()
        row.setText(process)
        # _process keeps the committed name so renames can be reverted.
        row._process = process
        self.update_gamerow(row)
        rowcount = self.gameslayout['table'].rowCount()
        self.gameslayout['table'].insertRow(rowcount)
        self.gameslayout['table'].setItem(rowcount, 0, row)
        self.gameslayout['table'].itemChanged.connect(self.rename_process)
        self.gameslayout['table'].blockSignals(False)
        return row
    def populate_appdata(self):
        """Fill the table with every process stored in the config."""
        for process in self.manager.config['appdata']:
            self.create_gamerow(process)
        self.gameslayout['table'].sortByColumn(0, QtCore.Qt.AscendingOrder)
    def load_appsettings(self, *args):
        """Show the selected process settings in the right-hand form."""
        block_signals(self.gameslayout.values(), True)
        current = self.gameslayout['table'].currentItem()
        if current:
            process = current.text()
            self.gameslayout['stacked'].setCurrentWidget(self.gameslayout['stacked_process'])
            val = self.manager.config['appdata'].get(process, {})
            # Placeholders show the values that would effectively be applied.
            finalvals = self.manager.get_informations(process)
            self.gameslayout['stacked_processpath'].setText(val.get('path', {}).get(sys.platform, ''))
            self.gameslayout['category'].setText(val.get('category'))
            self.gameslayout['title'].setText(val.get('title'))
            self.gameslayout['description'].setText(val.get('description'))
            self.gameslayout['tags'].setText(', '.join(val.get('tags', [])))
            self.gameslayout['command'].setText(val.get('command'))
            self.gameslayout['title'].setPlaceholderText(finalvals.get('title'))
            self.gameslayout['category'].setPlaceholderText(finalvals.get('category'))
            self.gameslayout['tags'].setPlaceholderText(', '.join(finalvals.get('tags')))
            self.gameslayout['description'].setPlaceholderText(finalvals.get('description'))
            self.gameslayout['command'].setPlaceholderText(finalvals.get('command'))
            # Lock buttons only apply to the general settings page.
            self.gameslayout['title'].setButtonVisibility(False)
            self.gameslayout['category'].setButtonVisibility(False)
            self.gameslayout['command'].setButtonVisibility(False)
            self.gameslayout['description'].setButtonVisibility(False)
            self.gameslayout['tags'].setButtonVisibility(False)
            self.gameslayout['remove_process'].setEnabled(True)
            self.update_invalidcategory(val.get('category'))
        block_signals(self.gameslayout.values(), False)
    def load_generalsettings(self, *args):
        """Deselect the table and show the general default settings."""
        block_signals(self.gameslayout.values(), True)
        self.gameslayout['table'].clearSelection()
        self.gameslayout['table'].setCurrentCell(-1, -1)
        self.gameslayout['stacked'].setCurrentWidget(self.gameslayout['stacked_label'])
        val = self.manager.config['base']
        elements = ['category', 'title', 'tags', 'description', 'command']
        for key in elements:
            self.gameslayout[key].setPlaceholderText('')
        self.gameslayout['category'].setText(val.get('category'))
        self.gameslayout['title'].setText(val.get('title'))
        self.gameslayout['description'].setText(val.get('description'))
        self.gameslayout['tags'].setText(','.join(val.get('tags', [])))
        self.gameslayout['command'].setText(val.get('command'))
        self.gameslayout['title'].setButtonVisibility(True)
        self.gameslayout['category'].setButtonVisibility(True)
        self.gameslayout['command'].setButtonVisibility(True)
        self.gameslayout['description'].setButtonVisibility(True)
        self.gameslayout['tags'].setButtonVisibility(True)
        self.gameslayout['title'].changeButtonState(val.get('forced_title', ''))
        self.gameslayout['category'].changeButtonState(val.get('forced_category', ''))
        self.gameslayout['command'].changeButtonState(val.get('forced_command', ''))
        self.gameslayout['description'].changeButtonState(val.get('forced_description', ''))
        self.gameslayout['tags'].changeButtonState(val.get('forced_tags', []))
        self.gameslayout['remove_process'].setEnabled(False)
        self.update_invalidcategory(val.get('category'))
        block_signals(self.gameslayout.values(), False)
    def set_shortcuts(self, init=False):
        """(Re)register global hotkeys; window-local shortcuts only on init."""
        if init:
            QtWidgets.QShortcut(QtGui.QKeySequence("F11"), self, self.mouseDoubleClickEvent)
            QtWidgets.QShortcut(QtGui.QKeySequence("F5"), self, self.reload)
        # Global hotkeys (keyboard module) work even when the window is hidden.
        keyboard.add_hotkey(self.manager.config['shortcuts']['create_clip'], self.manager.create_clip)
        keyboard.add_hotkey(self.manager.config['shortcuts']['create_marker'], self.manager.create_marker)
    def create_statuslayout(self):
        """Build the Status dock embedding the local web remote page."""
        self.panel_status = {}
        self.panel_status['dock'] = QtWidgets.QDockWidget('Status')
        self.panel_status['dock'].setObjectName('dockstatus')
        self.panel_status['webpage'] = QtWebEngineWidgets.QWebEngineView()
        self.panel_status['webpage'].setAcceptDrops(False)
        self.panel_status['webpage'].page().profile().clearHttpCache()
        self.panel_status['webpage'].load(QtCore.QUrl("http://localhost:{}/".format(self.webremote.port)))
        self.panel_status['dock'].setWidget(self.panel_status['webpage'])
def block_signals(iterable, block):
    """Toggle Qt signal emission for every object in *iterable*.

    Thin convenience wrapper around QObject.blockSignals(); *block* is True
    to silence signals, False to restore them.
    """
    for obj in iterable:
        obj.blockSignals(block)
class Preferences(QtWidgets.QDialog):
    """Modal preferences dialog aggregating one tab per settings domain.

    Emits `updated` after a successful accept and `finished` on cancel/close.
    """
    updated = QtCore.Signal()
    finished = QtCore.Signal()
    def __init__(self, manager, parent=None):
        super().__init__(parent)
        self.tabs = QtWidgets.QTabWidget()
        self.tab_general = Preferences_General(manager)
        self.tab_streams = Preferences_Streams(manager)
        self.tab_assignations = Preferences_Assignations(manager)
        self.tab_pauseprocesses = Preferences_Pauseprocesses(manager)
        self.tab_pauseservices = Preferences_Pauseservices(manager)
        for page, title in ((self.tab_general, "General"),
                            (self.tab_streams, "Streams Services"),
                            (self.tab_assignations, "Game Assignations"),
                            (self.tab_pauseprocesses, "Pause Processes")):
            self.tabs.addTab(page, title)
        # Pausing Windows services only makes sense on Windows.
        if sys.platform == 'win32':
            self.tabs.addTab(self.tab_pauseservices, "Pause Windows Services")
        self.buttons = QtWidgets.QDialogButtonBox(QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel)
        self.buttons.accepted.connect(self.accept)
        self.buttons.rejected.connect(self.cancel)
        self.mainLayout = QtWidgets.QVBoxLayout()
        self.mainLayout.addWidget(self.tabs)
        self.mainLayout.addWidget(self.buttons)
        self.setLayout(self.mainLayout)
        self.setWindowTitle('Preferences')
    def _pages(self):
        """Every settings page, in the order their reset/accept hooks must run."""
        return (self.tab_general, self.tab_streams, self.tab_pauseservices,
                self.tab_pauseprocesses, self.tab_assignations)
    def reset(self):
        """Reload every page from the saved configuration."""
        self.tabs.tabBar().show()
        for page in self._pages():
            page.reset()
    def accept(self):
        """Commit every page to the configuration, notify listeners, close."""
        for page in self._pages():
            page.accept()
        self.updated.emit()
        super().accept()
    def cancel(self):
        """Discard edits and close the dialog."""
        self.finished.emit()
        self.reject()
    def closeEvent(self, event):
        # Closing the window behaves like pressing Cancel.
        self.cancel()
        super().closeEvent(event)
    def open(self):
        """Show the dialog; global hotkeys are released while it is open."""
        keyboard.unhook_all()
        self.reset()
        super().open()
class Preferences_General(QtWidgets.QWidget):
    """Settings page for application-wide options and the global shortcuts."""
    def __init__(self, manager, parent=None):
        super().__init__(parent)
        self.manager = manager
        self.interface = {}
        self.interface['layout'] = QtWidgets.QFormLayout()
        self.interface['autostart'] = QtWidgets.QCheckBox()
        self.interface['starttray'] = QtWidgets.QCheckBox()
        self.interface['checktimer'] = QtWidgets.QSpinBox()
        self.interface['reload'] = QtWidgets.QSpinBox()
        self.interface['timeout'] = QtWidgets.QSpinBox()
        self.interface['port'] = QtWidgets.QSpinBox()
        # Insertion order of this dict drives the row order in the form.
        descriptions = {
            'autostart': 'Automatically start the check',
            'starttray': 'Automatically start minimised to the tray icon',
            'checktimer': 'Check the foreground process every (x) seconds',
            'reload': 'Reload the status webpage every (x) minutes',
            'timeout': 'Number of seconds before the token creation timeouts',
            'port': 'Port to use for the webremote (needs a restart)',
        }
        for key, text in descriptions.items():
            self.interface['label_' + key] = QtWidgets.QLabel(text)
        self.interface['checktimer'].setMinimum(1)
        self.interface['reload'].setMinimum(5)
        self.interface['timeout'].setMinimum(1)
        # Stay out of the privileged port range.
        self.interface['port'].setMinimum(1025)
        self.interface['port'].setMaximum(65535)
        for key in descriptions:
            self.interface[key].setMinimumHeight(30)
            self.interface['label_' + key].setMinimumHeight(30)
        self.interface['line'] = QtWidgets.QFrame()
        self.interface['line'].setObjectName('stream_line')
        self.interface['line'].setFrameShape(QtWidgets.QFrame.HLine)
        for key, text in (('createclip', 'Create Clip'), ('createmarker', 'Create Marker')):
            self.interface['label_' + key] = QtWidgets.QLabel(text)
            self.interface['shortcut_' + key] = KeySequenceRecorder('')
            self.interface['label_' + key].setMinimumHeight(30)
            self.interface['shortcut_' + key].setMinimumHeight(30)
        for key in descriptions:
            self.interface['layout'].addRow(self.interface['label_' + key], self.interface[key])
        self.interface['layout'].addRow(self.interface['line'])
        self.interface['layout'].addRow(self.interface['label_createclip'], self.interface['shortcut_createclip'])
        self.interface['layout'].addRow(self.interface['label_createmarker'], self.interface['shortcut_createmarker'])
        self.setLayout(self.interface['layout'])
    def accept(self):
        """Write the widget values back into the live configuration."""
        base = self.manager.config['base']
        base['checktimer'] = self.interface['checktimer'].text()
        base['autostart'] = self.interface['autostart'].isChecked()
        base['starttray'] = self.interface['starttray'].isChecked()
        base['reload'] = self.interface['reload'].text()
        base['timeout'] = self.interface['timeout'].text()
        base['port'] = self.interface['port'].text()
        shortcuts = self.manager.config['shortcuts']
        shortcuts['create_clip'] = self.interface['shortcut_createclip'].text()
        shortcuts['create_marker'] = self.interface['shortcut_createmarker'].text()
        # The timeout applies to every socket created afterwards.
        socket.setdefaulttimeout(int(base['timeout']))
    def reset(self):
        """Load the widget values from the saved configuration."""
        base = self.manager.config['base']
        self.interface['checktimer'].setValue(int(base['checktimer']))
        self.interface['autostart'].setChecked(base['autostart'])
        self.interface['starttray'].setChecked(base['starttray'])
        self.interface['reload'].setValue(int(base['reload']))
        self.interface['timeout'].setValue(int(base['timeout']))
        self.interface['port'].setValue(int(base['port']))
        shortcuts = self.manager.config['shortcuts']
        self.interface['shortcut_createclip'].setText(shortcuts['create_clip'])
        self.interface['shortcut_createmarker'].setText(shortcuts['create_marker'])
class Preferences_Assignations(QtWidgets.QDialog):
    """Settings page matching, per game category, the category name used by each service.

    Edits go into self.temporary_settings (a deep copy of the saved config)
    and are only committed by accept().  Each entry stores a 'name' and a
    'valid' flag set by the online validation.
    """
    def __init__(self, manager, parent=None):
        super().__init__(parent)
        self.manager = manager
        self.interface = {}
        self.interface['layout'] = QtWidgets.QVBoxLayout()
        self.interface['label'] = QtWidgets.QLabel('Some stream services do not use the same name for the same activity. You can match the category for each services.\nFor example Youtube has only "Gaming" and no specific game in its database.')
        self.interface['label'].setAlignment(QtCore.Qt.AlignCenter)
        self.interface['hlayout'] = QtWidgets.QHBoxLayout()
        self.interface['processes'] = QtWidgets.QComboBox()
        self.interface['validate'] = QtWidgets.QPushButton('Check All')
        self.interface['processes'].setFixedHeight(27)
        self.interface['validate'].setFixedHeight(27)
        self.interface['validate'].clicked.connect(self.validate)
        self.interface['table'] = QtWidgets.QTableWidget()
        self.interface['table'].horizontalHeader().setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
        self.interface['table'].verticalHeader().setSectionResizeMode(QtWidgets.QHeaderView.Fixed)
        self.interface['table'].horizontalHeader().setSectionResizeMode(QtWidgets.QHeaderView.Stretch)
        self.interface['table'].setWordWrap(True)
        self.interface['hlayout'].addWidget(self.interface['processes'])
        self.interface['hlayout'].addWidget(self.interface['validate'])
        self.interface['layout'].addWidget(self.interface['label'])
        self.interface['layout'].addLayout(self.interface['hlayout'])
        self.interface['layout'].addWidget(self.interface['table'])
        self.servicesorder = sorted(common.manager.SERVICES)
        self.setLayout(self.interface['layout'])
        self.set_layoutvertical()
    def set_layoutvertical(self):
        """Build one table row per service, each holding an editable category line."""
        self.interface['processes'].show()
        self.interface['processes'].currentIndexChanged.connect(self.populate)
        self.interface['table'].insertColumn(0)
        for service in self.servicesorder:
            rowcount = self.interface['table'].rowCount()
            self.interface['table'].insertRow(rowcount)
            widget = QtWidgets.QLineEdit()
            widget.editingFinished.connect(functools.partial(self.save_assignation, service))
            widget.textEdited.connect(functools.partial(self.edited, widget, service))
            self.interface['table'].setCellWidget(rowcount, 0, widget)
            # Services without category support keep their row but stay read-only.
            if not common.manager.SERVICES[service].Main.features['category']:
                widget.setDisabled(True)
            self.interface['line_' + service] = widget
        self.interface['table'].setVerticalHeaderLabels(self.servicesorder)
        self.interface['table'].horizontalHeader().setVisible(False)
    def edited(self, widget, service, text):
        """Offer live auto-completion of category names while the user types."""
        # Add a QTimer to prevent lag (queries the service on every keystroke).
        service = self.manager.services.get(service)
        if service:
            autocompletion = service.query_category(text)
            self.interface['completer'] = QtWidgets.QCompleter(list(autocompletion.keys()))
            self.interface['completer'].setCompletionMode(QtWidgets.QCompleter.UnfilteredPopupCompletion)
            self.interface['completer'].activated.connect(functools.partial(self.set_validautocomplete, service.name)) # If activated() then validated automatically
            widget.setCompleter(self.interface['completer'])
    def set_validautocomplete(self, service, text):
        """Force validation of the current category and service."""
        current = self.interface['processes'].currentText()
        self.temporary_settings.setdefault(current, {}).setdefault(service, {})
        self.temporary_settings[current][service] = {'name': text, 'valid': True}
        self.populate()
    def validate(self, category=None):
        """Re-validate assignations; a truthy *category* limits the check to the displayed one."""
        if category:
            category = self.interface['processes'].currentText()
        self.temporary_settings = self.manager.validate_assignations(self.temporary_settings, category)
        self.populate()
    def populate(self):
        """Refresh every service line (text + validity colour) for the selected category."""
        block_signals(self.interface.values(), True)
        current = self.interface['processes'].currentText()
        for index, service in enumerate(self.servicesorder):
            text = self.temporary_settings.get(current, {}).get(service, {}).get('name', '')
            valid = self.temporary_settings.get(current, {}).get(service, {}).get('valid', None)
            disabled = not common.manager.SERVICES[service].Main.features['category']
            widget = self.interface['line_' + service]
            widget.setText(text if not disabled else '')
            # Grey = unsupported service, blue = not yet checked, red = invalid,
            # transparent = valid.
            if disabled:
                widget.setStyleSheet('background-color:#efefef;border: transparent')
            elif valid is None:
                widget.setStyleSheet('background-color:#bbdefb;border: transparent')
            elif not valid:
                widget.setStyleSheet('background-color:#faa;border: transparent')
            else:
                widget.setStyleSheet('background-color:transparent')
        block_signals(self.interface.values(), False)
    def save_assignation(self, service):
        """Store the edited name for (current category, service) and re-validate it."""
        category = self.interface['processes'].currentText()
        widget = self.interface['line_' + service]
        current = widget.text()
        old = self.temporary_settings.get(category, {}).get(service, {}).get('name', '')
        if category and current != old:
            self.temporary_settings.setdefault(category, {}).setdefault(service, {})
            self.temporary_settings[category][service] = {'name': current, 'valid': ''}
            self.validate(category)
    def accept(self):
        """Validate everything one last time and commit to the manager's config."""
        assignations = self.manager.validate_assignations(self.temporary_settings)
        self.manager.config['assignations'] = assignations
    def reset(self):
        """Reload the category list and the working copy from the saved config."""
        block_signals(self.interface.values(), True)
        self.temporary_settings = copy.deepcopy(self.manager.config['assignations'])
        self.interface['processes'].clear()
        categories = [i['category'] for i in self.manager.config['appdata'].values()]
        self.interface['processes'].insertItems(0, sorted(categories))
        self.populate()
        block_signals(self.interface.values(), False)
class Preferences_Streams(QtWidgets.QWidget):
    """Settings page to enable and configure each stream service.

    The service list sits on the left; the right side shows the selected
    service's enabled state, feature support, clip delay and OAuth fields.
    Edits go into self.temporary_settings and are committed by accept().

    Fixes over the previous revision: paintEvent/service_changed no longer
    crash with AttributeError when the table has no current item (possible
    during construction or while rows are being rebuilt).
    """
    def __init__(self, manager, parent=None):
        # TODO: add a "get token" button
        super().__init__(parent)
        self.manager = manager
        self.panel_services = {}
        self.panel_services['container'] = QtWidgets.QGridLayout()
        self.panel_services['llayout'] = QtWidgets.QVBoxLayout()
        self.panel_services['list'] = QtWidgets.QTableWidget()
        self.panel_services['list'].setObjectName('table_services')
        self.panel_services['list'].setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
        self.panel_services['list'].setColumnCount(1)
        self.panel_services['list'].setWordWrap(False)
        self.panel_services['list'].verticalHeader().setVisible(False)
        self.panel_services['list'].verticalHeader().setDefaultSectionSize(40)
        self.panel_services['list'].horizontalHeader().setVisible(False)
        self.panel_services['list'].horizontalHeader().setSectionResizeMode(0, QtWidgets.QHeaderView.Stretch)
        self.panel_services['list'].currentCellChanged.connect(self.service_changed)
        self.panel_services['list'].setFixedWidth(150)
        self.panel_services['llayout'].addWidget(self.panel_services['list'])
        self.panel_services['settings_formlayout'] = QtWidgets.QFormLayout()
        self.panel_services['label_delay'] = QtWidgets.QLabel('Delay before Clip/Marker Creation')
        self.panel_services['label_delay'].setToolTip('Useful when you stream with a delay timer, the clip will then be synced accordingly.')
        self.panel_services['label_delay'].setMinimumHeight(30)
        self.panel_services['line_delay'] = QtWidgets.QSpinBox()
        self.panel_services['line_delay'].setToolTip('Useful when you stream with a delay timer, the clip will then be synced accordingly.')
        self.panel_services['line_delay'].setMinimum(0)
        self.panel_services['line_delay'].editingFinished.connect(functools.partial(self.save_servicedata, 'delay'))
        self.panel_services['line_delay'].setMinimumHeight(30)
        self.panel_services['settings_formlayout'].addRow(self.panel_services['label_delay'], self.panel_services['line_delay'])
        # Per-service editable fields; 'enabled' is handled separately below.
        self.elements = ['enabled', 'scope', 'redirect_uri', 'authorization_base_url', 'token_url', 'client_id', 'client_secret']
        self.panel_services['advanced_settings_formlayout'] = QtWidgets.QFormLayout()
        for elem in self.elements[1:]:
            namelabel = 'label_' + elem
            nameline = 'line_' + elem
            self.panel_services[namelabel] = QtWidgets.QLabel(elem.replace('_', ' ').capitalize())
            if elem in ['client_id', 'client_secret']:
                # Secrets are masked until hovered.
                self.panel_services[nameline] = LineditSpoiler()
                self.panel_services[nameline].setProperty('mandatory', True)
            else:
                self.panel_services[nameline] = QtWidgets.QLineEdit()
            self.panel_services[nameline].editingFinished.connect(functools.partial(self.save_servicedata, elem))
            self.panel_services['advanced_settings_formlayout'].addRow(self.panel_services[namelabel], self.panel_services[nameline])
            self.panel_services[namelabel].setObjectName(namelabel)
        self.panel_services['label_client_id'].setTextFormat(QtCore.Qt.RichText)
        self.panel_services['label_client_id'].setOpenExternalLinks(True)
        self.panel_services['collapsible'] = CollapsibleBox("Advanced Settings")
        self.panel_services['collapsible'].setContentLayout(self.panel_services['advanced_settings_formlayout'])
        self.panel_services['collapsible_layout'] = QtWidgets.QVBoxLayout()
        self.panel_services['collapsible_layout'].addWidget(self.panel_services['collapsible'])
        self.panel_services['collapsible_layout'].addStretch()
        self.panel_services['settings'] = QtWidgets.QVBoxLayout()
        self.panel_services['settings'].addLayout(self.panel_services['settings_formlayout'])
        self.panel_services['settings'].addLayout(self.panel_services['collapsible_layout'])
        self.panel_services['label_enabled'] = QtWidgets.QLabel('Enabled')
        self.panel_services['line_enabled'] = QtWidgets.QPushButton()
        self.panel_services['line_enabled'].setCheckable(True)
        self.panel_services['line_enabled'].setFixedWidth(71)
        self.panel_services['line_enabled'].setObjectName('enable_service')
        self.panel_services['line_enabled'].clicked.connect(functools.partial(self.save_servicedata, 'enabled'))
        self.panel_services['label_enabled'].setMinimumHeight(30)
        self.panel_services['line_enabled'].setMinimumHeight(30)
        self.panel_services['reset_token'] = QtWidgets.QPushButton('Reset Auth')
        self.panel_services['reset_token'].clicked.connect(self.reset_token)
        self.panel_services['reset_token'].setMinimumHeight(30)
        self.panel_services['hlayout'] = QtWidgets.QHBoxLayout()
        self.panel_services['hlayout'].addWidget(self.panel_services['label_enabled'])
        self.panel_services['hlayout'].addWidget(self.panel_services['line_enabled'])
        self.panel_services['hlayout'].addStretch()
        self.panel_services['hlayout'].addWidget(self.panel_services['reset_token'])
        self.panel_services['line'] = QtWidgets.QFrame()
        # self.panel_services['line'].setMinimumHeight(30)
        self.panel_services['line'].setObjectName('stream_line')
        self.panel_services['line'].setFrameShape(QtWidgets.QFrame.HLine)
        self.panel_services['features_layout'] = QtWidgets.QVBoxLayout()
        self.panel_services['features_layout'].setSpacing(0)
        self.panel_services['label_features'] = QtWidgets.QLabel('Features')
        self.panel_services['label_features'].setDisabled(True)
        # Feature names are read from one service; assumes every service class
        # declares the same feature keys — TODO confirm.
        features = list(common.manager.SERVICES['Facebook'].Main.features.keys())
        for feat in features:
            name = 'feature_' + feat
            self.panel_services[name] = QtWidgets.QLabel(feat)
            self.panel_services[name].setAlignment(QtCore.Qt.AlignCenter)
            self.panel_services[name].setObjectName('features')
            self.panel_services['features_layout'].addWidget(self.panel_services['feature_' + feat])
            if feat == features[0]:
                updateStyle(self.panel_services[name], 'firstv', True)
            elif feat == features[-1]:
                updateStyle(self.panel_services[name], 'lastv', True)
        self.panel_services['container'].addLayout(self.panel_services['llayout'], 0, 0, -1, 1)
        self.panel_services['container'].addLayout(self.panel_services['hlayout'], 0, 1, 1, -1)
        self.panel_services['container'].addWidget(self.panel_services['line'], 1, 1, 1, -1)
        self.panel_services['container'].addLayout(self.panel_services['features_layout'], 3, 1)
        self.panel_services['container'].addLayout(self.panel_services['settings'], 3, 3, -1, 1)
        self.panel_services['container'].setRowStretch(self.panel_services['container'].rowCount(), 1)
        self.setLayout(self.panel_services['container'])
        # NOTE(review): service_changed is also wired to currentCellChanged
        # above, so one user click may refresh the panel twice; kept as-is.
        self.panel_services['list'].itemSelectionChanged.connect(self.service_changed)
    def paintEvent(self, paintEvent):
        """Draw the selected service's logo, faded, as a background watermark."""
        item = self.panel_services['list'].currentItem()
        if item is None:
            # Nothing selected yet (e.g. first paint during construction).
            return
        service = item.text()
        imgpath = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'data', 'theme', 'images', service + '.png'))
        if os.path.isfile(imgpath):
            pixmap = QtGui.QPixmap()
            pixmap.load(imgpath)
            widWidth = self.width()
            widHeight = self.height()
            pixmap = pixmap.scaled(10, widHeight, QtCore.Qt.KeepAspectRatioByExpanding)
            paint = QtGui.QPainter(self)
            paint.setOpacity(0.3)
            paint.drawPixmap(widWidth-pixmap.width()*0.8, -pixmap.height()*0.2, pixmap)
    def create_servicesrows(self):
        """Rebuild the service list from the working settings, one row per service."""
        self.panel_services['list'].blockSignals(True)
        while self.panel_services['list'].rowCount():
            self.panel_services['list'].removeRow(0)
        for service in common.manager.SERVICES:
            row = StreamTableWidgetItem(service)
            rowcount = self.panel_services['list'].rowCount()
            self.panel_services['list'].insertRow(rowcount)
            self.panel_services['list'].setItem(rowcount, 0, row)
            row.set_disabledrowstyle(self.temporary_settings[service].get('enabled', False))
            self.panel_services['list'].setCurrentCell(rowcount, 0)
            if self.temporary_settings[service].get('enabled', False):
                self.service_changed()
                # Try to instantiate each enabled service so stale credentials
                # are reported immediately.
                if not self.check_service():
                    logger.error("The service {} is activated in the settings but it couldn't be created".format(service))
        self.panel_services['list'].sortItems(QtCore.Qt.AscendingOrder)
        self.panel_services['list'].blockSignals(False)
    def service_changed(self):
        """Sync the right-hand panel with the newly selected service."""
        item = self.panel_services['list'].currentItem()
        if item is None:
            # Selection was cleared (e.g. rows being rebuilt): nothing to show.
            return
        block_signals(self.panel_services.values(), True)
        service = item.text()
        config = self.temporary_settings[service]
        for elem in self.elements:
            if elem == 'enabled':
                val = config.get(elem, False)
                self.panel_services['line_' + elem].setChecked(val)
                item.set_disabledrowstyle(val)
            else:
                self.panel_services['line_' + elem].setText(str(config.get(elem, '')))
        self.panel_services['label_client_id'].setText('Client id (<a href="{}">?</a>)'.format(common.manager.SERVICES[service].Main.devurl))
        features = common.manager.SERVICES[service].Main.features
        for feat, state in features.items():
            updateStyle(self.panel_services['feature_' + feat], 'available', state)
        # The delay field only makes sense for services that support clips.
        if not features['clips']:
            self.panel_services['label_delay'].hide()
            self.panel_services['line_delay'].hide()
        else:
            self.panel_services['label_delay'].show()
            self.panel_services['line_delay'].show()
        self.panel_services['line_delay'].setValue(int(config.get('delay', 0)))
        self.repaint()
        block_signals(self.panel_services.values(), False)
    def check_service(self):
        """Try to create the selected service when enabled.

        Returns True on success, False (after disabling the row and showing a
        tooltip) on failure, None when the service is not enabled.
        """
        item = self.panel_services['list'].currentItem()
        name = item.text()
        state = self.panel_services['line_enabled'].isChecked()
        if state:
            # Distinct local: `name` is the service key, `created` the instance.
            created = self.manager.create_service(name, self.temporary_settings[name], force=True)
            if created:
                self.temporary_settings[created.name] = created.config  # Save access token
                return True
            self.panel_services['line_enabled'].setChecked(False)
            self.save_servicedata('enabled')
            QtWidgets.QToolTip().showText(self.panel_services['line_enabled'].mapToGlobal(QtCore.QPoint(0, 20)), "<nobr>Couldn't create the service.</nobr><br><nobr>Check your <b style='color:red'>client id</b> and <b style='color:red'>client secret</b> below.</nobr> <br><br>The quota API for this service may have been reached and can't be used anymore for some time.", msecDisplayTime=10000)
            return False
    def save_servicedata(self, element):
        """Persist one edited field of the selected service into the working settings."""
        item = self.panel_services['list'].currentItem()
        service = item.text()
        if element == 'delay':
            self.temporary_settings[service][element] = self.panel_services['line_delay'].text()
            return
        if element == 'enabled':
            result = self.panel_services['line_enabled'].isChecked()
        else:
            result = self.panel_services['line_' + element].text()
        if self.temporary_settings[service][element] != result:
            self.temporary_settings[service][element] = result
            if element != 'enabled':
                # Credentials changed: the old token is no longer valid.
                self.reset_token()
            self.check_service()
        item.set_disabledrowstyle(self.temporary_settings[service]['enabled'])
    def reset_token(self):
        """Drop the stored OAuth authorization for the selected service and retry."""
        service = self.panel_services['list'].currentItem().text()
        self.temporary_settings[service]['authorization'] = {}
        self.check_service()
    def accept(self):
        """Copy the working settings into the manager's config and rebuild services."""
        for service in self.temporary_settings:
            self.manager.config['streamservices'][service] = self.temporary_settings[service]
        self.manager.services = {}
        self.manager.create_services()
    def reset(self):
        """Start editing from a deep copy of the saved settings."""
        self.temporary_settings = copy.deepcopy(self.manager.config['streamservices'])
        self.create_servicesrows()
        self.panel_services['list'].setCurrentCell(0, 0)
class StreamTableWidgetItem(QtWidgets.QTableWidgetItem):
    """Read-only table row showing a service name with its icon and an
    enabled/disabled coloured glow behind the text."""
    def __init__(self, service):
        super().__init__()
        self.service = service
        iconpath = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'data', 'theme', 'images', self.service + '.png'))
        self.setIcon(QtGui.QPixmap(iconpath))
        self.setText(self.service)
        self.setFlags(self.flags() & ~QtCore.Qt.ItemIsEditable)
    def set_disabledrowstyle(self, val):
        """Green glow + black text when *val* is truthy, red glow + grey text otherwise."""
        if val:
            glow = QtGui.QColor.fromRgbF(0.282, 0.855, 0.255, 1)
            text = QtGui.QColor(0, 0, 0)
        else:
            glow = QtGui.QColor.fromRgbF(1, 0, 0, 1)
            text = QtGui.QColor(150, 150, 150)
        self.setForeground(text)
        # Small radial glow fading to transparent at the edge.
        gradient = QtGui.QRadialGradient(130, 20, 5, 120, 20)
        gradient.setColorAt(0, glow)
        gradient.setColorAt(0.8, glow)
        gradient.setColorAt(1, QtGui.QColor.fromRgbF(0, 0, 0, 0))
        self.setBackground(QtGui.QBrush(gradient))
class Preferences_Pause(QtWidgets.QWidget):
    """Base settings page for choosing processes/services to suspend while checking.

    Two tables side by side: the left lists everything currently running, the
    right what will be paused.  Subclasses override list_processes().

    Fixes over the previous revision: transfer_pauseprocess no longer
    appends/removes None when nothing is selected, entries that are both
    configured and currently running are no longer inserted twice, and
    accept() uses list.extend instead of a side-effect comprehension.
    """
    def __init__(self, manager, name, parent=None):
        super().__init__(parent)
        self.manager = manager
        # Live list inside the manager's config; mutated in place by accept().
        self.config = self.manager.config['base'][name]
        self.panel_pause = {}
        self.panel_pause['container'] = QtWidgets.QGridLayout()
        self.panel_pause['label'] = QtWidgets.QLabel('When you click "START" any entry on the right side will be paused until the button "STOP" is pressed again.<br/>Usefull for automatically pausing applications that use bandwith or CPU.')
        self.panel_pause['label'].setAlignment(QtCore.Qt.AlignCenter)
        for elem in ['list', 'list_pause']:
            self.panel_pause[elem] = QtWidgets.QTableWidget()
            self.panel_pause[elem].setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
            self.panel_pause[elem].setColumnCount(1)
            self.panel_pause[elem].setWordWrap(False)
            self.panel_pause[elem].verticalHeader().setVisible(False)
            self.panel_pause[elem].horizontalHeader().setVisible(False)
            self.panel_pause[elem].horizontalHeader().setSectionResizeMode(0, QtWidgets.QHeaderView.Stretch)
        self.panel_pause['refresh'] = QtWidgets.QPushButton('🔃')
        self.panel_pause['add'] = QtWidgets.QPushButton('→')
        self.panel_pause['remove'] = QtWidgets.QPushButton('←')
        self.panel_pause['refresh'].setFlat(True)
        self.panel_pause['add'].setFlat(True)
        self.panel_pause['remove'].setFlat(True)
        self.panel_pause['refresh'].clicked.connect(self.populate_pauseprocess)
        self.panel_pause['add'].clicked.connect(functools.partial(self.transfer_pauseprocess, 'add'))
        self.panel_pause['remove'].clicked.connect(functools.partial(self.transfer_pauseprocess, 'remove'))
        self.panel_pause['addremove_widget'] = QtWidgets.QWidget()
        self.panel_pause['addremove_layout'] = QtWidgets.QVBoxLayout()
        self.panel_pause['addremove_layout'].addWidget(self.panel_pause['refresh'])
        self.panel_pause['addremove_layout'].addStretch()
        self.panel_pause['addremove_layout'].addWidget(self.panel_pause['add'])
        self.panel_pause['addremove_layout'].addWidget(self.panel_pause['remove'])
        self.panel_pause['addremove_layout'].addStretch()
        self.panel_pause['addremove_widget'].setLayout(self.panel_pause['addremove_layout'])
        self.setLayout(self.panel_pause['container'])
        self.panel_pause['container'].addWidget(self.panel_pause['label'], 0, 0, 1, -1)
        self.panel_pause['container'].addWidget(self.panel_pause['list'], 1, 0, -1, 1)
        self.panel_pause['container'].addWidget(self.panel_pause['addremove_widget'], 1, 1, -1, 1)
        self.panel_pause['container'].addWidget(self.panel_pause['list_pause'], 1, 2, -1, 1)
    def populate_pauseprocess(self):
        """Rebuild both tables from the running processes and the working config."""
        while self.panel_pause['list'].rowCount():
            self.panel_pause['list'].removeRow(0)
        while self.panel_pause['list_pause'].rowCount():
            self.panel_pause['list_pause'].removeRow(0)
        self.currentprocesses = self.list_processes()
        def insertrow(name, destination):
            # Append one text row at the bottom of *destination*.
            row = QtWidgets.QTableWidgetItem()
            row.setText(name)
            rowcount = destination.rowCount()
            destination.insertRow(rowcount)
            destination.setItem(rowcount, 0, row)
        seen = []
        for service in self.currentprocesses.values():
            if service['name'] in self.currentconfig:
                insertrow(service['name'], self.panel_pause['list_pause'])
            else:
                insertrow(service['name'], self.panel_pause['list'])
            # Record every running name; the old code only recorded unmatched
            # ones, which duplicated configured-and-running entries below.
            seen.append(service['name'])
        # Configured entries that are not currently running still show up
        # in the pause list.
        for process in self.currentconfig:
            if process not in seen:
                insertrow(process, self.panel_pause['list_pause'])
        self.panel_pause['list'].sortByColumn(0, QtCore.Qt.AscendingOrder)
        self.panel_pause['list_pause'].sortByColumn(0, QtCore.Qt.AscendingOrder)
    def transfer_pauseprocess(self, operation):
        """Move the selected row between the two lists and sync the working config.

        *operation* is 'add' (pause it) or anything else (stop pausing it).
        A no-op when no row is selected (previously crashed on None).
        """
        if operation == 'add':
            source = self.panel_pause['list']
            destination = self.panel_pause['list_pause']
        else:
            source = self.panel_pause['list_pause']
            destination = self.panel_pause['list']
        item = source.currentItem()
        if not item:
            return
        name = item.text()
        row = QtWidgets.QTableWidgetItem()
        row.setText(name)
        rowcount = destination.rowCount()
        source.removeRow(source.currentRow())
        destination.insertRow(rowcount)
        destination.setItem(rowcount, 0, row)
        self.panel_pause['list'].sortByColumn(0, QtCore.Qt.AscendingOrder)
        self.panel_pause['list_pause'].sortByColumn(0, QtCore.Qt.AscendingOrder)
        if operation == 'add':
            self.currentconfig.append(name)
        else:
            self.currentconfig.remove(name)
    def list_processes(self):
        """Overridden by subclasses; returns a dict of running process info."""
        return {}
    def accept(self):
        """Copy the pause list into the manager's config (mutating it in place)."""
        rowdata = []
        for row in range(self.panel_pause['list_pause'].rowCount()):
            item = self.panel_pause['list_pause'].item(row, 0)
            rowdata.append(item.text())
        # clear()+extend() keeps the same list object inside the config.
        self.config.clear()
        self.config.extend(rowdata)
    def reset(self):
        """Start editing from a copy of the saved pause list."""
        self.currentconfig = self.config.copy()
        self.populate_pauseprocess()
class Preferences_Pauseservices(Preferences_Pause):
    """Windows-services variant of the pause page (relies on pssuspend.exe)."""
    def __init__(self, manager, parent=None):
        super().__init__(manager, 'services', parent)
        # Keep the layout stable even though the refresh button is hidden.
        sizepolicy = self.panel_pause['refresh'].sizePolicy()
        sizepolicy.setRetainSizeWhenHidden(True)
        self.panel_pause['refresh'].setSizePolicy(sizepolicy)
        self.panel_pause['refresh'].hide()
        self.hasoverlay = False
        if sys.platform == 'win32':
            # pssuspend.exe cannot be redistributed; offer to download it.
            if not os.path.isfile('lib/pssuspend.exe'):
                self.show_overlay()
            admin = ctypes.windll.shell32.IsUserAnAdmin() != 0
            if not admin:
                self.panel_pause['label'].setText(self.panel_pause['label'].text() + '<br><b style="color:red">Requires Admin Rights!</b> Unless you gave access to services management to your account <a href="https://www.coretechnologies.com/products/ServiceSecurityEditor/">(?)</a>')
                self.panel_pause['label'].setOpenExternalLinks(True)
    def disable_all(self):
        """Best-effort disable of every widget on the page (skips non-widget entries)."""
        for i in self.panel_pause.values():
            try:
                i.setDisabled(True)
            except AttributeError:
                pass
    def list_processes(self):
        """Return Windows services as a dict of info dicts (see common.tools.listservices)."""
        return common.tools.listservices()
    def populate_pauseprocess(self):
        """Fill both lists, then attach a descriptive tooltip to each service row."""
        super().populate_pauseprocess()
        for service in self.currentprocesses.values():
            try:
                item = self.panel_pause['list'].findItems(service['name'], QtCore.Qt.MatchExactly)[0]
            except IndexError:
                # Not in the left list: the row must be in the pause list.
                item = self.panel_pause['list_pause'].findItems(service['name'], QtCore.Qt.MatchExactly)[0]
            tooltip = '{} ({})\n\n{}'.format(service['display_name'], service['status'].upper(), service['description'].replace('. ', '.\n'))
            item.setToolTip(tooltip.strip())
    def resizeEvent(self, event):
        # Keep the download overlay covering the whole page while visible.
        if self.hasoverlay:
            self.overlay.move(0, 0)
            self.overlay.resize(self.width(), self.height())
    def show_overlay(self):
        """Cover the page with a prompt offering to download pssuspend.exe."""
        self.overlay = OverlayWidget(text='This requires admin rights and the external tool pssuspend.exe from Microsoft. Due to licences limitation it must be downloaded separately.\nEverything is automated and the file weight only 3Mo.\nDo you want to download it now?', buttontext='Download', parent=self)
        self.overlay.move(0, 0)
        self.overlay.resize(self.width(), self.height())
        self.overlay.clicked.connect(self.download_pssuspend)
        self.hasoverlay = True
        self.overlay.show()
    def download_pssuspend(self):
        """Fetch pssuspend into lib/; on failure, report inside the overlay."""
        if common.tools.download_pssuspend('lib'):
            self.close_overlay()
        else:
            self.overlay.label.setText(self.overlay.text + '\nThere was a problem during the download of the file')
    def close_overlay(self):
        """Dismiss the download overlay."""
        self.overlay.close()
        self.hasoverlay = False
class OverlayWidget(QtWidgets.QWidget):
    """Frameless overlay covering its parent, with a centred message and one button.

    Emits `clicked` when the button is pressed.
    """
    clicked = QtCore.Signal()
    def __init__(self, text, buttontext, parent=None):
        """*text* is the message shown in the label, *buttontext* the button caption."""
        super().__init__(parent)
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground)
        self.text = text
        # Backdrop colour comes from the default widget palette.  (A dead
        # assignment of a hard-coded QColor(30, 30, 30, 200), immediately
        # overwritten by this line, was removed.)
        self.fillColor = QtWidgets.QWidget().palette().color(QtWidgets.QWidget().backgroundRole())
        self.layout = QtWidgets.QVBoxLayout(self)
        self.label = QtWidgets.QLabel(self)
        self.label.setWordWrap(True)
        self.label.setAlignment(QtCore.Qt.AlignCenter)
        self.label.setText(self.text)
        self.button = QtWidgets.QPushButton(self)
        self.button.setText(buttontext)
        self.button.clicked.connect(self._clicked)
        self.layout.addStretch()
        self.layout.addWidget(self.label)
        self.layout.addWidget(self.button)
        self.layout.addStretch()
    def paintEvent(self, event):
        """Fill the whole widget with the backdrop colour."""
        s = self.size()
        qp = QtGui.QPainter()
        qp.begin(self)
        qp.setRenderHint(QtGui.QPainter.Antialiasing, True)
        qp.setBrush(self.fillColor)
        qp.drawRect(0, 0, s.width(), s.height())
        # End the painter explicitly rather than relying on its destructor.
        qp.end()
    def _clicked(self):
        """Relay the button press through the widget-level `clicked` signal."""
        self.clicked.emit()
class Preferences_Pauseprocesses(Preferences_Pause):
    """Regular-processes variant of the pause page."""
    def __init__(self, manager, parent=None):
        super().__init__(manager, 'processes', parent)
    def list_processes(self):
        """Return running processes as a dict of info dicts (see common.tools.listprocesses)."""
        return common.tools.listprocesses()
    def populate_pauseprocess(self):
        """Fill both lists, then attach a RAM/path tooltip to each process row."""
        super().populate_pauseprocess()
        for process in self.currentprocesses.values():
            try:
                name = process['name']
                item = self.panel_pause['list'].findItems(name, QtCore.Qt.MatchExactly)[0]
            except IndexError:
                # Not in the left list: the row must be in the pause list.
                item = self.panel_pause['list_pause'].findItems(name, QtCore.Qt.MatchExactly)[0]
            tooltip = '{0} ({1:.2f}% RAM)\n{2}'.format(name, process['memory_percent'], process['exe'])
            item.setToolTip(tooltip.strip())
class WebRemote(common.remote.WebRemote, QtCore.QThread):
    """Qt-threaded wrapper around the web remote: forwards check start/stop as signals."""
    startedcheck = QtCore.Signal()
    stoppedcheck = QtCore.Signal()
    def __init__(self, autostart=True):
        super().__init__()
        # Whether checking should begin as soon as the thread runs.
        self.running = autostart
    def start_check(self):
        """Notify listeners that checking started."""
        self.startedcheck.emit()
    def stop_check(self):
        """Notify listeners that checking stopped."""
        self.stoppedcheck.emit()
    def run(self):
        """Thread entry point: optionally start checking, serve, then spin the event loop."""
        if self.running:
            self.start_check()
        self.server()
        self.exec_()
class ManagerStreamThread(common.manager.ManageStream, QtCore.QThread):
    """Qt-threaded stream manager: runs the periodic check loop and reports via signals."""
    validate = QtCore.Signal(str)
    updated = QtCore.Signal(dict)
    createdservices = QtCore.Signal()
    def run(self):
        """Thread entry point: pause configured processes/services, then poll on a timer."""
        with common.tools.pause_processes(self.config['base']['processes']):
            with common.tools.pause_services(self.config['base']['services']):
                self.create_services()
                # 'checktimer' config value is in seconds; QTimer wants milliseconds.
                self.checktimer = QtCore.QTimer()
                self.checktimer.timeout.connect(self.main)
                self.checktimer.start(int(self.config['base']['checktimer']) * 1000)
                self.exec_()
    def main(self):
        """One poll iteration: refresh bots, check the application, emit any result."""
        self.create_commandbots()
        result = self.check_application()
        if result:
            self.updated.emit(result)
            logger.info(result)
    def create_services(self):
        """Create services, then signal the GUI that they exist."""
        super().create_services()
        self.createdservices.emit()
    # @common.tools.threaded
    def validate_assignations(self, config, category=None):
        """Validate assignations and emit the category that was validated (if any)."""
        result = super().validate_assignations(config, category)
        if category:
            self.validate.emit(category)
        return result
    def load_credentials(self, path=''):
        """Load credentials; warn the user with a dialog if the file cannot be parsed."""
        if not super().load_credentials(path):
            QtWidgets.QMessageBox.warning(None, "Can't Load Credentials File", "The JSON file must be wrong, check your file with a text editor or the person who sent it to you.", QtWidgets.QMessageBox.StandardButton.Ok)
    def load_config(self, path='', backup=True):
        """Load the config; on parse failure show an error dialog (mentioning the backup path)."""
        if super().load_config(path, backup) == False:
            msg ="The JSON file must be wrong, check your file with a text editor or validator."
            if backup:
                msg += "The preferences have been reset, the old preferences are still available at this path:\n{}".format(self.config_filepath+'_error')
            msgBox = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Critical, "Can't Load Preference File", msg)
            msgBox.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse)
            msgBox.exec_()
class StateButtons():
    """Mixin adding a multi-state icon button inside a Qt edit widget.

    'icons' maps a state value to its QIcon; clicking cycles through the states.
    The host widget is expected to provide an 'editingFinished' signal.
    """
    buttonClicked = QtCore.Signal(bool)
    def __init__(self, icons, parent=None):
        super().__init__(parent)
        self.button = QtWidgets.QToolButton(self)
        self.button.state = None
        self.button.icons = icons
        self.button.setStyleSheet('border: none; padding: 0px;')
        self.button.setCursor(QtCore.Qt.PointingHandCursor)
        self.button.clicked.connect(functools.partial(self.changeButtonState))
        self.setButtonVisibility(True)
    def setButtonVisibility(self, state):
        """Show/hide the button, reserving right padding inside the edit when shown."""
        frameWidth = self.style().pixelMetric(QtWidgets.QStyle.PM_DefaultFrameWidth)
        buttonSize = self.button.sizeHint()
        if state:
            self.button.show()
            self.setStyleSheet('padding-right: %dpx;' % (buttonSize.width() + frameWidth + 1))
            self.setMinimumSize(max(self.minimumSizeHint().width(), buttonSize.width() + frameWidth*2 + 2),
                            max(self.minimumSizeHint().height(), buttonSize.height() + frameWidth*2 + 2))
        else:
            self.button.hide()
            self.setStyleSheet('padding-right: 0px;')
    def changeButtonState(self, state=None):
        """Set the button state, or cycle to the next state when called with None."""
        if state == None:
            try:
                keys = list(self.button.icons.keys())
                i = keys.index(self.button.state)
                self.button.state = keys[i+1]
            except (ValueError, IndexError):
                # Unknown or last state: wrap around to the first one.
                self.button.state = keys[0]
        else:
            self.button.state = state
        self.button.setIcon(self.button.icons[self.button.state])
        self.buttonClicked.emit(self.button.state)
        self.editingFinished.emit()
    def resizeEvent(self, event):
        """Keep the button glued to the right edge, vertically centered."""
        buttonSize = self.button.sizeHint()
        frameWidth = self.style().pixelMetric(QtWidgets.QStyle.PM_DefaultFrameWidth)
        self.button.move(self.rect().right() - frameWidth - buttonSize.width(),
                         (self.rect().bottom() - buttonSize.height() + 1)/2)
        super().resizeEvent(event)
class CollapsibleBox(QtWidgets.QWidget):
    """Expandable/collapsible section: a toggle arrow button above an animated content area."""
    def __init__(self, title="", parent=None):
        super().__init__(parent)
        self.toggle_button = QtWidgets.QToolButton(text=title, checkable=True, checked=False)
        self.toggle_button.setMinimumHeight(30)
        self.toggle_button.setStyleSheet("QToolButton { border: none; }")
        self.toggle_button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
        self.toggle_button.setCheckable(True)
        self.toggle_button.setArrowType(QtCore.Qt.RightArrow)
        self.toggle_button.pressed.connect(self.on_pressed)
        # Content starts fully collapsed (height 0); the animations open it.
        self.content_area = QtWidgets.QScrollArea(maximumHeight=0, minimumHeight=0)
        # self.content_area.setFrameShape(QtWidgets.QFrame.NoFrame)
        lay = QtWidgets.QVBoxLayout(self)
        lay.setSpacing(0)
        lay.setContentsMargins(0, 0, 0, 0)
        lay.addWidget(self.toggle_button)
        lay.addWidget(self.content_area)
        # Animate the box's min/max height together with the content height.
        self.toggle_animation = QtCore.QParallelAnimationGroup(self)
        self.toggle_animation.addAnimation(QtCore.QPropertyAnimation(self, b"minimumHeight"))
        self.toggle_animation.addAnimation(QtCore.QPropertyAnimation(self, b"maximumHeight"))
        self.toggle_animation.addAnimation(QtCore.QPropertyAnimation(self.content_area, b"maximumHeight"))
    @QtCore.Slot()
    def on_pressed(self):
        """Flip the arrow and run the open/close animation."""
        checked = self.toggle_button.isChecked()
        self.toggle_button.setArrowType(QtCore.Qt.DownArrow if not checked else QtCore.Qt.RightArrow)
        self.toggle_animation.setDirection(QtCore.QAbstractAnimation.Backward if not checked else QtCore.QAbstractAnimation.Forward)
        self.toggle_animation.start()
    def setContentLayout(self, layout):
        """Install *layout* as the collapsible content and recompute animation bounds."""
        lay = self.content_area.layout()
        del lay
        self.content_area.setLayout(layout)
        collapsed_height = (self.sizeHint().height() - self.content_area.maximumHeight())
        content_height = layout.sizeHint().height()
        for i in range(self.toggle_animation.animationCount()):
            animation = self.toggle_animation.animationAt(i)
            animation.setDuration(0)
            animation.setStartValue(collapsed_height + content_height)
            animation.setEndValue(collapsed_height)
        # The last animation drives the content area itself (height -> 0 when collapsed).
        content_animation = self.toggle_animation.animationAt(self.toggle_animation.animationCount() - 1)
        content_animation.setDuration(0)
        content_animation.setStartValue(content_height)
        content_animation.setEndValue(0)
        self.toggle_animation.start()
class PlainTextEdit(StateButtons, QtWidgets.QPlainTextEdit):
    """Plain-text edit with state buttons; emits editingFinished when focus leaves."""
    editingFinished = QtCore.Signal()
    def focusOutEvent(self, event):
        super().focusOutEvent(event)
        self.editingFinished.emit()
class LineEdit(StateButtons, QtWidgets.QLineEdit):
    """Single-line edit combined with the StateButtons mixin (no extra behavior)."""
    pass
class LineditSpoiler(QtWidgets.QLineEdit):
    """Line edit whose text is blurred until the mouse hovers over it (spoiler effect)."""
    def __init__(self, blurAmount=10, parent=None):
        super().__init__(parent=parent)
        # Blur radius applied whenever the cursor is not over the widget.
        self.blurAmount = blurAmount
        self.effect = QtWidgets.QGraphicsBlurEffect(self)
        self.effect.setBlurRadius(blurAmount)
        self.setGraphicsEffect(self.effect)
    def enterEvent(self, event):
        # Reveal the text while hovered.
        self.effect.setBlurRadius(0)
        super().enterEvent(event)
    def leaveEvent(self, event):
        # Blur it again when the cursor leaves.
        self.effect.setBlurRadius(self.blurAmount)
        super().leaveEvent(event)
class KeySequenceRecorder(QtWidgets.QLineEdit):
    """Line edit that records a keyboard shortcut typed by the user."""
    def __init__(self, keySequence, parent=None):
        super().__init__(parent)
        self.setKeySequence(keySequence)
    def setKeySequence(self, keySequence):
        """Store and display the sequence; accepts a QKeySequence or a plain string."""
        try:
            self.keySequence = keySequence.toString(QtGui.QKeySequence.NativeText)
        except AttributeError:
            # Already a string.
            self.keySequence = keySequence
        self.setText(self.keySequence)
    def keyPressEvent(self, e):
        """Capture the pressed key plus modifiers and record it as the new sequence."""
        if e.type() == QtCore.QEvent.KeyPress:
            key = e.key()
            if key == QtCore.Qt.Key_unknown:
                logger.warning('Unknown key for shortcut')
                return
            # Ignore presses of a lone modifier key; wait for the actual key.
            if(key == QtCore.Qt.Key_Control or
                key == QtCore.Qt.Key_Shift or
                key == QtCore.Qt.Key_Alt or
                key == QtCore.Qt.Key_Meta):
                return
            # Fold the active modifiers into the key code.
            modifiers = e.modifiers()
            if modifiers & QtCore.Qt.ShiftModifier:
                key += QtCore.Qt.SHIFT
            if modifiers & QtCore.Qt.ControlModifier:
                key += QtCore.Qt.CTRL
            if modifiers & QtCore.Qt.AltModifier:
                key += QtCore.Qt.ALT
            if modifiers & QtCore.Qt.MetaModifier:
                key += QtCore.Qt.META
            self.setKeySequence(QtGui.QKeySequence(key))
def updateStyle(obj, name, value):
    """Set a Qt dynamic property and re-apply the style so QSS selectors refresh."""
    obj.setProperty(name, value)
    obj.setStyle(obj.style())
| 50.874445 | 398 | 0.672114 | 78,270 | 0.975473 | 0 | 0 | 28,294 | 0.352626 | 0 | 0 | 14,062 | 0.175254 |
4b13fbf54481cade8e8734d48b08412beb1ed9cd | 4,009 | py | Python | tests/io/export/voc/test_create_annotation.py | wbknez/breakdb | f783820425c8cb70d8caedc6f5839a72de7c945e | [
"Apache-2.0"
] | 1 | 2020-02-03T18:31:20.000Z | 2020-02-03T18:31:20.000Z | tests/io/export/voc/test_create_annotation.py | wbknez/breakdb | f783820425c8cb70d8caedc6f5839a72de7c945e | [
"Apache-2.0"
] | null | null | null | tests/io/export/voc/test_create_annotation.py | wbknez/breakdb | f783820425c8cb70d8caedc6f5839a72de7c945e | [
"Apache-2.0"
] | null | null | null | """
Contains unit tests to ensure single database items are created correctly in a
Pascal VOC compatible format.
"""
import os
from xml.etree.ElementTree import Element, SubElement
import numpy as np
from breakdb.io.export.voc import create_annotation
from tests.helpers.dataset import create_random_string
from tests.helpers.xml import match
class TestCreateAnnotation:
    """
    Test suite for :function: 'create_annotation'.
    """

    def test_create_annotation_does_not_create_annotation_if_empty(self):
        """With no annotation coords, only the header tags should be emitted."""
        width = np.random.randint(100, 1920)
        height = np.random.randint(100, 1200)
        depth = np.random.choice([1, 3], 1)[0]

        # NOTE(review): x and y are computed but never used in this test.
        x = np.random.randint(0, width, 5)
        y = np.random.randint(0, height, 5)

        random_paths = [create_random_string(10) for _ in range(5)]
        file_path = os.path.join(*random_paths) + ".png"

        xml = create_annotation(file_path, width, height, depth, [])

        # Build the expected XML tree by hand, without any <object> element.
        expected = Element("annotation")

        folder = SubElement(expected, 'folder')
        filename = SubElement(expected, 'filename')
        path = SubElement(expected, 'path')
        source = SubElement(expected, 'source')
        size = SubElement(expected, 'size')
        segmented = SubElement(expected, 'segmented')

        database = SubElement(source, 'database')

        width_tag = SubElement(size, 'width')
        height_tag = SubElement(size, 'height')
        depth_tag = SubElement(size, 'depth')

        folder.text = os.path.basename(os.path.dirname(file_path))
        filename.text = os.path.basename(file_path)
        path.text = file_path
        segmented.text = "0"

        database.text = "Unknown"

        width_tag.text = str(width)
        height_tag.text = str(height)
        depth_tag.text = str(depth)

        match(xml, expected)

    def test_create_annotation_creates_well_formed_xml(self):
        """A single annotation should produce one <object> with its bounding box."""
        width = np.random.randint(100, 1920)
        height = np.random.randint(100, 1200)
        depth = np.random.choice([1, 3], 1)[0]

        x = np.random.randint(0, width, 5)
        y = np.random.randint(0, height, 5)
        # Flatten [(x1, y1), ...] into [x1, y1, x2, y2, ...] as the API expects.
        coords = [coord for coords in zip(x, y) for coord in coords]

        random_paths = [create_random_string(10) for _ in range(5)]
        file_path = os.path.join(*random_paths) + ".png"

        xml = create_annotation(file_path, width, height, depth, [coords])

        expected = Element("annotation")

        folder = SubElement(expected, 'folder')
        filename = SubElement(expected, 'filename')
        path = SubElement(expected, 'path')
        source = SubElement(expected, 'source')
        size = SubElement(expected, 'size')
        segmented = SubElement(expected, 'segmented')
        obj = SubElement(expected, 'object')

        database = SubElement(source, 'database')

        width_tag = SubElement(size, 'width')
        height_tag = SubElement(size, 'height')
        depth_tag = SubElement(size, 'depth')

        name = SubElement(obj, "name")
        pose = SubElement(obj, "pose")
        truncated = SubElement(obj, "truncated")
        difficult = SubElement(obj, "difficult")
        bndbox = SubElement(obj, "bndbox")

        x_min = SubElement(bndbox, "xmin")
        y_min = SubElement(bndbox, "ymin")
        x_max = SubElement(bndbox, "xmax")
        y_max = SubElement(bndbox, "ymax")

        folder.text = os.path.basename(os.path.dirname(file_path))
        filename.text = os.path.basename(file_path)
        path.text = file_path
        segmented.text = "0"

        database.text = "Unknown"

        width_tag.text = str(width)
        height_tag.text = str(height)
        depth_tag.text = str(depth)

        name.text = f"{os.path.basename(os.path.splitext(file_path)[0])}-1"
        pose.text = "Unspecified"
        truncated.text = "0"
        difficult.text = "0"

        # The bounding box is the min/max of the annotation's coordinates.
        x_min.text = str(np.min(x))
        y_min.text = str(np.min(y))
        x_max.text = str(np.max(x))
        y_max.text = str(np.max(y))

        match(xml, expected)
| 32.860656 | 78 | 0.626091 | 3,661 | 0.913195 | 0 | 0 | 0 | 0 | 0 | 0 | 548 | 0.136692 |
4b1766db2c0ad0a27e5899ec9658c4cad1b1b54e | 8,242 | py | Python | map.py | BenoitCorsini/world-flights | 5e5ce6575a912cb6a71bf1caf6ef7c2d388044ce | [
"MIT"
] | null | null | null | map.py | BenoitCorsini/world-flights | 5e5ce6575a912cb6a71bf1caf6ef7c2d388044ce | [
"MIT"
] | null | null | null | map.py | BenoitCorsini/world-flights | 5e5ce6575a912cb6a71bf1caf6ef7c2d388044ce | [
"MIT"
] | null | null | null | import os
import os.path as osp
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Circle, Polygon, Rectangle
from config import PARAMS
class WorldMap(object):
    '''
    Builds a 3D-looking matplotlib figure of the world map and provides helpers
    to normalize angles and project (longitude, latitude) coordinates onto it.

    'shapes' is a list of shapes, each shape being a list of
    (longitude, latitude) tuples describing one land polygon:

        shapes = [
            [(lon_1, lat_1), (lon_2, lat_2), ...],   # shape 1
            [(lon_1, lat_1), (lon_2, lat_2), ...],   # shape 2
            ...
        ]
    '''
    def __init__(self, shapes=None, params=PARAMS):
        '''
        Store the land shapes and the plotting parameters.

        shapes : list of list of (longitude, latitude) tuples, or None for empty.
        params : plotting configuration dictionary (shared default is intended).
        '''
        # 'shapes=None' instead of a mutable '[]' default: a literal list
        # default would be shared by every instance of the class.
        self.shapes = [] if shapes is None else shapes
        self.params = params
        self.globe = None # a globe useful to clip the figures

    @staticmethod
    def normalize_angle(angle):
        '''
        Normalizes any angle to be in [-180,180).
        '''
        while angle >= 180:
            angle -= 360
        while angle < -180:
            angle += 360
        assert (angle >= -180) & (angle < 180) # checking that 'angle' is well-normalized
        return angle

    @staticmethod
    def project(coord, angle=0, turn=0, flip=False, r=1, away=10):
        '''
        Projects the coordinates on the 3D map.
        'turn' is useful for coordinates partly at the left/right end of the other side of the globe.
        'away' is useful to avoid having non-desired lines on the map.

        Returns ((pos_x, pos_y), unseen) where 'unseen' is True when the point
        lies on the far side of the globe (and was pushed away from the disk).
        '''
        x, y = coord
        y = y*np.pi/180
        x = x - angle + turn*360
        unseen = False # if the coordinates are on the other side of the globe
        pos_x = r*np.sin(x*np.pi/180)*np.cos(y)
        pos_y = r*np.sin(y)
        d = pos_x**2 + pos_y**2
        if (x > 90) & (d <= 1):
            # Far side, right half: push the point outside the visible disk.
            pos_x = away*r*np.cos(y)
            pos_y *= away
            unseen = True
        elif (x < -90) & (d <= 1):
            # Far side, left half.
            pos_x = - away*r*np.cos(y)
            pos_y *= away
            unseen = True
        if flip:
            pos_x = - pos_x
        return (pos_x, pos_y), unseen

    def set_figure(self):
        '''
        Resets the figure.
        '''
        if hasattr(self, 'fig'):
            plt.close('all')
        # creating the general figure
        self.fig, self.ax = plt.subplots(figsize=[self.params['figure']['size']]*2)
        self.fig.subplots_adjust(left=0, right=1, bottom=0, top=1)
        self.ax.set_axis_off()
        extra = 1 + self.params['figure']['extra_space']
        self.ax.set_xlim(-extra, extra)
        self.ax.set_ylim(-extra, extra)
        if self.params['figure']['background'] is not None:
            # Background rectangle larger than the axes so it always covers them.
            self.ax.add_patch(Rectangle(
                xy=(-2*extra, -2*extra),
                width=4*extra,
                height=4*extra,
                color=self.params['figure']['background'],
                zorder=self.params['zorder']['background']
            ))

    def plot_globe(self, angle=0):
        '''
        Plots the globe and its shade as viewed from 'angle'.
        '''
        angle = self.normalize_angle(angle)
        self.globe = Circle(
            xy=(0, 0),
            radius=1,
            color=self.params['globe']['water_colour'],
            zorder=self.params['zorder']['water'],
            lw=0,
        )
        self.ax.add_patch(self.globe)
        for shape in self.shapes:
            for turn in [-1, 0, 1]: # to cover for the boundary problems
                points, unseen = zip(*[self.project(point, angle, turn) for point in shape])
                if not all(unseen):
                    # the border of the land
                    self.ax.add_patch(Polygon(
                        xy=points,
                        color=self.params['globe']['border_colour'],
                        zorder=self.params['zorder']['land_border'],
                        lw=self.params['globe']['border'],
                        clip_path=self.globe,
                        joinstyle='round',
                    ))
                    # the main land
                    self.ax.add_patch(Polygon(
                        xy=points,
                        color=self.params['globe']['land_colour'],
                        zorder=self.params['zorder']['land'],
                        lw=0,
                        clip_path=self.globe,
                    ))
        # plotting the shade
        self.plot_shade(angle)

    def plot_shade(self, angle=0):
        '''
        Plots the shaded version of the globe.
        '''
        angle = self.normalize_angle(angle + self.params['shade']['angle'])
        # general transformation applied on the shade
        transform = self.ax.transData.get_affine()
        x_shift = transform.get_matrix()[0,2]
        y_shift = transform.get_matrix()[1,2]
        x_scale = transform.get_matrix()[0,0]
        y_scale = transform.get_matrix()[1,1]
        transform.set_matrix(np.diag(np.diag(transform.get_matrix()))) # only keep the diagonal
        transform.scale(
            self.params['shade']['ratio']*self.params['shade']['scale'],
            self.params['shade']['scale']
        )
        transform.rotate_deg(self.params['shade']['rotation'])
        transform.translate(
            x_shift + x_scale*self.params['shade']['x_pos'],
            y_shift - y_scale + y_scale*self.params['shade']['y_pos']
        )
        # plotting the shaded world sphere
        self.ax.add_patch(Circle(
            xy=(0, 0),
            radius=1,
            color=self.params['shade']['water_colour'],
            zorder=self.params['zorder']['shade_water'],
            alpha=self.params['shade']['alpha'],
            transform=transform,
            lw=0,
        ))
        for shape in self.shapes:
            for turn in [-1, 0, 1]: # to cover for the boundary problems
                points, unseen = zip(*[self.project(point, angle, turn, flip=True, away=1) for point in shape])
                if not all(unseen):
                    self.ax.add_patch(Polygon(
                        xy=points,
                        color=self.params['shade']['land_colour'],
                        zorder=self.params['zorder']['shade_land'],
                        alpha=self.params['shade']['alpha'],
                        transform=transform,
                        lw=0,
                    ))

    def savefig(self, name='map', folder='.', title=''):
        '''
        Saves the current state of the figure.
        '''
        assert hasattr(self, 'fig')
        # 'exist_ok' avoids a race between the existence check and the creation.
        os.makedirs(folder, exist_ok=True)
        # adds a title when available
        if title:
            bbox = {
                'boxstyle' : 'round',
                'edgecolor' : self.params['text']['colour'],
                'facecolor' : self.params['text']['background'],
                'linewidth' : self.params['text']['border'],
            }
            self.ax.text(
                - 1 - self.params['figure']['extra_space'] + self.params['text']['x'],
                - 1 - self.params['figure']['extra_space'] + self.params['text']['y'],
                title,
                fontsize=self.params['text']['fontsize'],
                color=self.params['text']['colour'],
                #fontweight='demibold',
                bbox=bbox,
            )
        self.fig.savefig(osp.join(folder, name + '.png'), transparent=True)

    def plot(self, name='map', folder='.', title='', angle=0):
        '''
        Plots the world globe.
        '''
        self.set_figure()
        self.plot_globe(angle)
        self.savefig(name, folder, title)
4b17b051f3187df2daa4e97e42b6ba22e41b2320 | 322 | py | Python | base/models/provider.py | musicmash/notify | 0f1c72207979e812c6485238da32ca7f5b463859 | [
"MIT"
] | null | null | null | base/models/provider.py | musicmash/notify | 0f1c72207979e812c6485238da32ca7f5b463859 | [
"MIT"
] | 86 | 2020-07-13T11:14:24.000Z | 2022-03-25T01:10:30.000Z | base/models/provider.py | musicmash/notify | 0f1c72207979e812c6485238da32ca7f5b463859 | [
"MIT"
] | null | null | null | from django.db import models
from .base import BaseModel
class Provider(BaseModel):
    """Django model for a notification provider, keyed by its name."""
    # Primary key: the provider's unique name (max 50 chars).
    name = models.CharField(max_length=50, primary_key=True)

    class Meta:
        db_table = "providers"
        verbose_name = "Provider"
        verbose_name_plural = "Providers"

    def __str__(self):
        return self.name
| 18.941176 | 60 | 0.673913 | 261 | 0.810559 | 0 | 0 | 0 | 0 | 0 | 0 | 32 | 0.099379 |
4b188ae0e512d9c128c010ae409e8f80e9a5b8ee | 6,753 | py | Python | preprocess/weixin_prepare.py | xuyuandong/sequence_behavior_ctr_model | e1bb71b4579456b1c6fbf3b432a84a3cb52611b7 | [
"MIT"
] | 4 | 2020-01-08T13:39:59.000Z | 2021-09-21T08:13:44.000Z | preprocess/weixin_prepare.py | xuyuandong/sequence_behavior_ctr_model | e1bb71b4579456b1c6fbf3b432a84a3cb52611b7 | [
"MIT"
] | null | null | null | preprocess/weixin_prepare.py | xuyuandong/sequence_behavior_ctr_model | e1bb71b4579456b1c6fbf3b432a84a3cb52611b7 | [
"MIT"
] | 3 | 2020-01-09T02:45:14.000Z | 2021-09-21T08:13:59.000Z | import random
import numpy as np
import cPickle as pkl
Train_handle = open("./data/weixin_data/weixin_train.txt",'w')
Test_handle = open("./data/weixin_data/weixin_test.txt",'w')
Feature_handle = open("./data/weixin_data/weixin_feature.pkl",'w')
max_len = 50
def produce_neg_item_hist_with_cate(train_file, test_file):
item_dict = {}
sample_count = 0
hist_seq = 0
for line in train_file:
units = line.strip().split("\t")
item_hist_list = units[8].split(",")
vmid_hist_list = units[9].split(",")
cate_hist_list = units[10].split(",")
hist_list = zip(item_hist_list, vmid_hist_list, cate_hist_list)
sample_count += 1
for item in hist_list:
item_dict.setdefault(str(item),0)
for line in test_file:
units = line.strip().split("\t")
item_hist_list = units[8].split(",")
vmid_hist_list = units[9].split(",")
cate_hist_list = units[10].split(",")
hist_list = zip(item_hist_list, vmid_hist_list, cate_hist_list)
sample_count += 1
for item in hist_list:
item_dict.setdefault(str(item),0)
#print item_dict.keys()[:10]
del(item_dict["('0', '0', '0')"])
neg_array = np.random.choice(np.array(item_dict.keys()), (sample_count, max_len*2))
neg_list = neg_array.tolist()
sample_count = 0
for line in train_file:
units = line.strip().split("\t")
item_hist_list = units[8].split(",")
vmid_hist_list = units[9].split(",")
cate_hist_list = units[10].split(",")
hist_list = zip(item_hist_list, vmid_hist_list, cate_hist_list)
hist_seq = len(hist_list)
neg_hist_list = []
while len(neg_hist_list) < hist_seq:
for item in neg_list[sample_count]:
item = eval(item)
if item not in hist_list:
neg_hist_list.append(item)
if len(neg_hist_list) == hist_seq:
break
sample_count += 1
neg_item_list, neg_vmid_list, neg_cate_list = zip(*neg_hist_list)
Train_handle.write(line.strip() + "\t" + ",".join(neg_item_list) + "\t" + ",".join(neg_vmid_list) + "\t" + ",".join(neg_cate_list) + "\n" )
for line in test_file:
units = line.strip().split("\t")
item_hist_list = units[8].split(",")
vmid_hist_list = units[9].split(",")
cate_hist_list = units[10].split(",")
hist_list = zip(item_hist_list, vmid_hist_list, cate_hist_list)
hist_seq = len(hist_list)
neg_hist_list = []
while len(neg_hist_list) < hist_seq:
for item in neg_list[sample_count]:
item = eval(item)
if item not in hist_list:
neg_hist_list.append(item)
if len(neg_hist_list) == hist_seq:
break
sample_count += 1
neg_item_list, neg_vmid_list, neg_cate_list = zip(*neg_hist_list)
Test_handle.write(line.strip() + "\t" + ",".join(neg_item_list) + "\t" + ",".join(neg_vmid_list) + "\t" + ",".join(neg_cate_list) + "\n" )
def generate_sample_list():
    """Read the local train/test files, normalize each sample's variable-length
    fields to fixed sizes, and return (train_sample_list, test_sample_list).

    Field layout (tab-separated): units[2] = side info, units[6] = tags,
    units[7] = segs, units[8:11] = item/vmid/cate histories (comma-separated).
    Side/tags/segs are clipped or right-padded with '0' to fixed lengths; the
    three histories are clipped to the LAST max_len entries or padded with
    ('0','0','0') triples. The train list is shuffled before being returned.
    """
    max_sides = 30
    max_tags = 5
    max_segs = 5

    def pad_or_clip(values, size):
        # Clip to the first `size` entries, or right-pad with '0' placeholders.
        if len(values) >= size:
            return values[:size]
        return values + ['0'] * (size - len(values))

    def normalize(line):
        # Turn one raw tab-separated line into its fixed-length form.
        units = line.strip().split("\t")
        units[2] = ','.join(pad_or_clip(units[2].split(","), max_sides))
        if units[6] == '':
            units[6] = '0'
        units[6] = ','.join(pad_or_clip(units[6].split(","), max_tags))
        if units[7] == '':
            units[7] = '0'
        # BUG FIX: the original clipped segs with `tags_list[:max_segs]`,
        # silently replacing the segs field with the tags field.
        units[7] = ','.join(pad_or_clip(units[7].split(","), max_segs))
        item_hist_list = units[8].split(",")
        vmid_hist_list = units[9].split(",")
        cate_hist_list = units[10].split(",")
        hist_list = list(zip(item_hist_list, vmid_hist_list, cate_hist_list))
        if len(hist_list) > max_len:
            # Keep the most recent max_len history entries.
            hist_list = hist_list[-max_len:]
        else:
            hist_list = hist_list + [('0', '0', '0')] * (max_len - len(hist_list))
        item_list, vmid_list, cate_list = zip(*hist_list)
        units[8] = ','.join(item_list)
        units[9] = ','.join(vmid_list)
        units[10] = ','.join(cate_list)
        return '\t'.join(units)

    train_sample_list = [normalize(line) for line in open("./data/weixin_data/local_train.txt")]
    test_sample_list = [normalize(line) for line in open("./data/weixin_data/local_test.txt")]
    random.shuffle(train_sample_list)
    return train_sample_list, test_sample_list
if __name__ == "__main__":
    # Build fixed-length train/test samples, then append sampled negative
    # histories and write both sets to the global output handles.
    train_sample_list, test_sample_list = generate_sample_list()
    produce_neg_item_hist_with_cate(train_sample_list, test_sample_list)
| 37.726257 | 147 | 0.568192 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 496 | 0.073449 |
4b1897d255b0413c9d4325f9c12538b29485ce83 | 2,197 | py | Python | chapter15/cache_aside/populate_db.py | JoeanAmiee/Mastering-Python-Design-Patterns-Second-Edition | 89c55dcf5e1e0e730dde593b487050f360371932 | [
"MIT"
] | 278 | 2018-08-16T12:59:24.000Z | 2022-03-21T08:21:11.000Z | chapter15/cache_aside/populate_db.py | 50611/Mastering-Python-Design-Patterns-Second-Edition | 6efc4a935f15d2aa6c840131f72fb8c53a493a93 | [
"MIT"
] | 4 | 2019-05-16T11:44:45.000Z | 2022-02-04T07:24:47.000Z | chapter15/cache_aside/populate_db.py | 50611/Mastering-Python-Design-Patterns-Second-Edition | 6efc4a935f15d2aa6c840131f72fb8c53a493a93 | [
"MIT"
] | 166 | 2018-08-13T21:47:16.000Z | 2022-03-18T12:20:31.000Z | import sys
import sqlite3
import csv
from random import randint
from faker import Faker
fake = Faker()
def setup_db():
    """Create the 'quotes' table in data/quotes.sqlite3.

    Errors (missing folder, table already exists, ...) are printed rather than
    raised, preserving the original best-effort behavior.
    """
    db = None
    try:
        db = sqlite3.connect('data/quotes.sqlite3')
        # Get a cursor object
        cursor = db.cursor()
        cursor.execute('''
            CREATE TABLE quotes(id INTEGER PRIMARY KEY, text TEXT)
        ''')
        db.commit()
    except Exception as e:
        print(e)
    finally:
        # BUG FIX: the original called db.close() unconditionally, raising
        # NameError when sqlite3.connect() itself failed (db never bound).
        if db is not None:
            db.close()
def add_quotes(quotes_list):
    """Insert each quote text with a random id in [1, 100]; return the inserted
    (id, text) pairs.

    Individual insert failures (e.g. duplicate random id) are printed and
    skipped; connection failures are printed and yield an empty result.
    """
    quotes = []
    db = None
    try:
        db = sqlite3.connect('data/quotes.sqlite3')
        cursor = db.cursor()
        for quote_text in quotes_list:
            quote_id = randint(1, 100)
            quote = (quote_id, quote_text)
            try:
                cursor.execute('''INSERT INTO quotes(id, text) VALUES(?, ?)''', quote)
                quotes.append(quote)
            except Exception as e:
                print(f"Error with quote id {quote_id}: {e}")
        db.commit()
    except Exception as e:
        print(e)
    finally:
        # BUG FIX: guard against db being unbound when connect() fails
        # (the original raised NameError here). Also removed the duplicate
        # 'quotes = []' initialization the original had.
        if db is not None:
            db.close()
    return quotes
def main():
    """CLI entry point: 'init' creates the table; 'update_db_and_cache' adds fake
    quotes to the DB and appends them to the CSV cache; 'update_db_only' skips
    the cache (used to demonstrate cache staleness)."""
    args = sys.argv
    if args[1] == 'init':
        setup_db()
    elif args[1] == 'update_db_and_cache':
        quotes_list = [fake.sentence() for _ in range(1, 11)]
        quotes = add_quotes(quotes_list)
        print("New (fake) quotes added to the database:")
        for q in quotes:
            print(f"Added to DB: {q}")

        # Populate the cache with this content
        with open('data/quotes_cache.csv', "a", newline="") as csv_file:
            writer = csv.DictWriter(csv_file,
                                    fieldnames=['id', 'text'],
                                    delimiter=";")
            for q in quotes:
                print(f"Adding '{q[1]}' to cache")
                writer.writerow({'id': str(q[0]), 'text': q[1]})
    elif args[1] == 'update_db_only':
        quotes_list = [fake.sentence() for _ in range(1, 11)]
        quotes = add_quotes(quotes_list)
        print("New (fake) quotes added to the database ONLY:")
        for q in quotes:
            print(f"Added to DB: {q}")
if __name__ == "__main__":
    # Dispatch on the first command-line argument (init / update_*).
    main()
| 25.252874 | 86 | 0.522531 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 526 | 0.239417 |
4b1a17df754eb85c581497a16047422be041c22f | 523 | py | Python | amulet/world_interface/chunk/interfaces/leveldb/leveldb_13/leveldb_13_interface.py | Podshot/Amulet-Core | 678a722daa5e4487d193a7e947ccceacac325fd2 | [
"MIT"
] | null | null | null | amulet/world_interface/chunk/interfaces/leveldb/leveldb_13/leveldb_13_interface.py | Podshot/Amulet-Core | 678a722daa5e4487d193a7e947ccceacac325fd2 | [
"MIT"
] | null | null | null | amulet/world_interface/chunk/interfaces/leveldb/leveldb_13/leveldb_13_interface.py | Podshot/Amulet-Core | 678a722daa5e4487d193a7e947ccceacac325fd2 | [
"MIT"
] | null | null | null | from __future__ import annotations
from amulet.world_interface.chunk.interfaces.leveldb.leveldb_12.leveldb_12_interface import (
LevelDB12Interface,
)
class LevelDB13Interface(LevelDB12Interface):
    """Chunk interface for leveldb chunk format version 13."""

    def __init__(self):
        super().__init__()
        self.features["chunk_version"] = 13

    @staticmethod
    def is_valid(key):
        """Return True only for ('leveldb', 13) interface keys."""
        return key[0] == "leveldb" and key[1] == 13


INTERFACE_CLASS = LevelDB13Interface
| 21.791667 | 93 | 0.6826 | 325 | 0.621415 | 0 | 0 | 163 | 0.311663 | 0 | 0 | 24 | 0.045889 |
4b1b507d5e7bd884f752c61b8ba7c52263c2268a | 921 | py | Python | oarepo_model_builder/builders/jsonschema_builder.py | mesemus/oarepo-model-builder | 3dd9cc3db887c67f7b58281faae65c8162b0651e | [
"MIT"
] | null | null | null | oarepo_model_builder/builders/jsonschema_builder.py | mesemus/oarepo-model-builder | 3dd9cc3db887c67f7b58281faae65c8162b0651e | [
"MIT"
] | null | null | null | oarepo_model_builder/builders/jsonschema_builder.py | mesemus/oarepo-model-builder | 3dd9cc3db887c67f7b58281faae65c8162b0651e | [
"MIT"
] | null | null | null | from oarepo_model_builder.builders.json import JSONBuilder
from oarepo_model_builder.output import JsonSchemaOutput
class JSONSchemaBuilder(JSONBuilder):
    """Handles building of jsonschema from a data model specification."""
    def __init__(self):
        super().__init__()
        # Current JsonSchemaOutput being built (set lazily in pre()).
        self.output = None
    def pre(self, el, config, path, outputs):
        """Called before descending into *el*; pushes the matching container."""
        if not path:
            # Root element: create the output and make it the stack base.
            output = outputs['jsonschema'] = JsonSchemaOutput("TODO")
            self.stack[0] = output.data
        else:
            # 'oarepo:'-prefixed keys are builder directives, not schema content.
            path_skipped = path[-1].startswith('oarepo:')
            if path_skipped:
                self.push(self.IGNORED_SUBTREE, path)
            elif isinstance(el, dict):
                self.push({}, path)
            elif isinstance(el, (list, tuple)):
                self.push([], path)
            else:
                # Scalar leaf: pushed as-is.
                self.push(el, path)
    def post(self, el, config, path, outputs):
        """Called after the subtree is processed; unwinds the stack."""
        self.pop()
| 32.892857 | 73 | 0.587405 | 802 | 0.870793 | 0 | 0 | 0 | 0 | 0 | 0 | 96 | 0.104235 |
4b1d49f6efb27ace41851af3af43454783b205c7 | 399 | py | Python | solutions/level0_tasks.py | sksuzuki/How-to-Learn-to-Code | 347943dbd2a3d176f3459c2e9f18cba1bdf78597 | [
"MIT"
] | null | null | null | solutions/level0_tasks.py | sksuzuki/How-to-Learn-to-Code | 347943dbd2a3d176f3459c2e9f18cba1bdf78597 | [
"MIT"
] | 3 | 2019-06-06T21:11:41.000Z | 2019-06-06T21:12:37.000Z | solutions/level0_tasks.py | sksuzuki/How-to-Learn-to-Code | 347943dbd2a3d176f3459c2e9f18cba1bdf78597 | [
"MIT"
] | null | null | null | import numpy as np
def square(x):
    """Return the square of *x*."""
    return x * x
def volume_converter(volume, unit):
    """Scale *volume* by the SI factor for *unit* ('mL', 'uL', 'nL' or 'kL'),
    rounded to 10 decimal places. Raises KeyError for an unknown unit."""
    factor = {'mL': 1E-3, 'uL': 1E-6, 'nL': 1E-9, 'kL': 1E3}[unit]
    return round(volume * factor, 10)
def squared_sum(in_list):
    """Return the sum of the squares of the numbers in *in_list*."""
    values = np.asarray(in_list)
    return np.sum(values * values)
| 26.6 | 65 | 0.629073 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 128 | 0.320802 |
4b1e6350105907d7f3eb0e342a99233ff398a655 | 10,305 | py | Python | benchmark/automated_agents_selenium/exatag_labels_agent.py | MedTAG/medtag-core | f2dae7b38230179d71babede7e4910631d91053f | [
"MIT"
] | 6 | 2021-12-20T12:15:17.000Z | 2022-02-02T15:28:42.000Z | benchmark/automated_agents_selenium/exatag_labels_agent.py | MedTAG/medtag-core | f2dae7b38230179d71babede7e4910631d91053f | [
"MIT"
] | 1 | 2022-03-07T14:57:44.000Z | 2022-03-11T18:11:55.000Z | benchmark/automated_agents_selenium/exatag_labels_agent.py | MedTAG/medtag-core | f2dae7b38230179d71babede7e4910631d91053f | [
"MIT"
] | 2 | 2021-05-29T09:44:38.000Z | 2021-12-28T03:53:40.000Z | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import psycopg2
import time
import statistics
from selenium.webdriver.support.select import Select
import json
def wait_until_unchecked(driver, nums_3):
    """Return the label checkboxes when every index in *nums_3* is unselected,
    otherwise return False (for use as a WebDriverWait condition)."""
    checkboxes = driver.find_elements_by_xpath('//input[@name="labels"]')
    if all(not checkboxes[index].is_selected() for index in nums_3):
        return checkboxes
    return False
def login(driver):
    """Log the fixed test user into the ExaTag web app and open the Labels view.

    Fills the login form, then clicks through the three selection dropdowns
    in order (English / Colon / AOEC), presses "Start" and finally opens the
    "Labels" tab.

    Returns True on success, False if any element could not be located in time.
    """
    username = "selenium_test"
    password = "selenium"
    driver.get("http://examode.dei.unipd.it/exatag/")
    driver.find_element_by_id("inputUsername").send_keys(username)
    driver.find_element_by_id("inputPassword").send_keys(password)
    driver.find_element_by_xpath('//button[text()="Log In"]').click()
    try:
        # Wait for the post-login "Start" button before touching the dropdowns.
        ele = WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, '//button[text()="Start"]'))
        )
        all_spans = driver.find_elements_by_xpath("//div[@class='selection css-2b097c-container']")
        for element in all_spans:
            element.click()
            # The dropdowns are positional: first gets "English", second
            # "Colon", any further one "AOEC".
            if all_spans.index(element) == 0:
                driver.find_element_by_xpath('//div[text()="English"]').click()
            elif all_spans.index(element) == 1:
                driver.find_element_by_xpath('//div[text()="Colon"]').click()
            else:
                driver.find_element_by_xpath('//div[text()="AOEC"]').click()
        ele.click()
        ele1 = WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, '//button[text()="Labels"]'))
        )
        ele1.click()
    except Exception as e:
        print('ERROR')
        print(e)
        return False
    else:
        # print('ok')
        return True
def exatag_lab_test(driver):
    """Replay 100 ground-truth label sets in the ExaTag UI and time the run.

    Loads the expected labels from ../datasets/labels/labels.json, maps each
    label name to its checkbox position (0-4), then for each of the first
    100 reports ticks the matching checkboxes and presses the "but_sx"
    button to submit and advance. Afterwards it steps through the reports
    again with "but_dx" and clears the inserted ground truth via the
    Clear/Yes dialog.

    Returns the elapsed seconds of the labelling loop, or False on error.
    """
    f = open('../datasets/labels/labels.json','r')
    reports1 = json.load(f)
    reports = []
    for key in reports1.keys():
        label = reports1[key]
        reports.append(label)
    try:
        count = 0
        nums = []
        # Translate each report's label names into checkbox indices.
        while count < 100:
            labs = reports[count]
            nums_1 = []
            for cop in labs:
                if cop == 'Cancer':
                    nums_1.append(0)
                elif cop == 'Adenomatous polyp - high grade dysplasia':
                    nums_1.append(1)
                elif cop == 'Adenomatous polyp - low grade dysplasia':
                    nums_1.append(2)
                elif cop == 'Hyperplastic polyp':
                    nums_1.append(3)
                elif cop == 'Non-informative':
                    nums_1.append(4)
            nums.append(nums_1)
            count = count+1
            # print(str(count))
            # print(str(labs))
            # print('\n')
        count = 0
        testo = ''
        # Wait until both the report list and the report body are rendered.
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, '//div[@class="container_list"]'))
        )
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, '//div[@id="report_sel"]'))
        )
        inp = driver.find_elements_by_xpath('//input[@name="labels"]')
        start = time.time()
        click = 0
        while count < 100:
            # time.sleep(0.02)
            # if count > 0:
            #     selected_option = select.first_selected_option
            #     if (selected_option.get_attribute('value') == str(count)):
            time.sleep(0.02)
            testo_rep = driver.find_element_by_xpath('//div[@id="report_sel"]')
            # Only act once the displayed report text changed, i.e. the UI
            # actually advanced to the next report.
            if (testo != testo_rep.text):
                testo = testo_rep.text
                nums_3 = []
                nums_2 = nums[count]
                # if count>0:
                #     nums_3 = nums[count-1]
                sel = False
                # Busy-wait until every checkbox has been reset to unchecked
                # before ticking the next report's labels.
                while sel == False:
                    ss = 0
                    for el in range(len(inp)):
                        if inp[el].is_selected() == False:
                            ss = ss + 1
                        else:
                            break
                    if ss == len(inp):
                        sel = True
                if sel:
                    # Tick this report's labels, then submit with "but_sx".
                    for el in nums_2:
                        inp[el].click()
                        click = click+1
                    # time.sleep(0.02)
                    driver.find_element_by_xpath('//button[@id="but_sx"]').click()
                    click = click+1
                    time.sleep(0.2)
                # time.sleep(0.02)
                count = count + 1
        end = time.time()
        tot = end - start
        print('tot: '+str(tot))
        print('click: '+str(click))
        # Step back through the 100 reports, then wipe the inserted ground
        # truth so the next benchmark iteration starts from a clean state.
        for i in range(100):
            driver.find_element_by_xpath('//button[@id="but_dx"]').click()
            time.sleep(0.3)
        WebDriverWait(driver, 10).until(
            EC.element_to_be_clickable((By.XPATH, '//button[text()="Clear"]'))
        ).click()
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, '//button[text()="Yes"]'))
        ).click()
        time.sleep(0.3)
        return tot
    except Exception as e:
        print('ERROR')
        print(e)
        return False
# else:
# # print('ok')
# # driver.quit()
# cursor.execute('SELECT gt_json FROM ground_truth_log_file WHERE username = %s ORDER BY insertion_time ASC',
# ['selenium_test'])
# ans = cursor.fetchall()
# if len(ans) != len(reports):
# st = 'A groundtruth is missing'
# return st
# count = 0
# while count < 100:
# # report = json.dump(reports[count])
# labs_john = reports[count]['labels']
# nums = []
# json_el = ans[count][0]
#
#
# for cop in labs_john:
# nums.append(int(cop['seq_number']))
#
# labs_sel = json_el['labels']
# for cop in labs_sel:
# # print(cop['seq_number'])
# # print(nums)
# # print('\n')
# if cop['seq_number'] not in nums:
# stringa = str(count) + ' : ' + str(cop) + ' is missing.'
# return stringa
# # cursor.execute('SELECT gt_json FROM ground_truth_log_file WHERE username = %s ORDER BY insertion_time ASC',['selenium_test'])
# # ans = cursor.fetchall()
# # for el in ans:
# # json_el = el[0]
# # lab = json_el['labels']
# # for cop in lab:
# # print(cop['seq_number'])
# # print(nums)
# # print('\n')
# # if cop['seq_number'] not in nums:
# # stringa = str(count) + ' : ' + str(cop) + ' is missing.'
# # return stringa
# count = count+1
# return tot
# except (Exception, psycopg2.Error) as e:
# print(e)
#
#
# finally:
# # closing database connection.
# if (connection):
# cursor.close()
# connection.close()
if __name__ == '__main__':
    # Benchmark driver: logs in once, then repeats the labelling session 40
    # times, accumulating the per-session timings and finally printing the
    # total time and the standard deviation of the runs.
    exec_path = "" # INSERT HERE THE PATH TO THE DRIVER
    driver = webdriver.Chrome(executable_path=exec_path)
    data = []
    timer = 0
    try:
        c = 0
        log_in = login(driver)
        if log_in:
            while c < 40:
                time.sleep(2)
                print(str(c))
                # connection = psycopg2.connect(dbname="groundtruthdb", user="ims", password="grace.period", host="localhost",
                #                               port="5444")
                #
                # cursor = connection.cursor()
                # cursor.execute('SELECT COUNT(*) FROM associate where username = %s;',['selenium_test'])
                # ans = cursor.fetchone()[0]
                # if(ans == 100):
                #     cursor.execute('DELETE FROM associate where username = %s;',['selenium_test'])
                #     connection.commit()
                #
                # cursor.execute('SELECT COUNT(*) FROM ground_truth_log_file where username = %s AND gt_type = %s;',['selenium_test','labels'])
                # ans = cursor.fetchone()[0]
                # if(ans == 100):
                #     cursor.execute('DELETE FROM ground_truth_log_file where username = %s and gt_type = %s;',['selenium_test','labels'])
                #     connection.commit()
                # After the first run, reload the page and re-open the
                # "Labels" view before measuring again.
                if c > 0:
                    driver.refresh()
                    ele1 = WebDriverWait(driver, 10).until(
                        EC.presence_of_element_located((By.XPATH, '//button[text()="Labels"]'))
                    )
                    ele1.click()
                timer_1 = exatag_lab_test(driver)
                data.append(timer_1)
                print(str(timer_1))
                # NOTE(review): type(timer_1) == 'str' compares a type object
                # with a string literal and is therefore always False;
                # isinstance(timer_1, str) was probably intended — confirm.
                if(type(timer_1) == 'str'):
                    break
                else:
                    timer = timer + timer_1
                c = c+1
    except (Exception, psycopg2.Error) as e:
        print(e)
    finally:
        # closing database connection.
        # if (connection):
        #     cursor.close()
        #     connection.close()
        print(timer)
        std = statistics.stdev(data)
        print(str(std))
| 35.78125 | 147 | 0.450849 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,615 | 0.350801 |
4b1ec72cd59cc0bdeabf8053a9474d679e3c099c | 963 | py | Python | src/main/tools/dbpy/FotechUtils/dbUtils.py | inqwell/inq | 31ce4cd6b9b123b1ec4462905ccbcf7c00d6efc3 | [
"BSD-3-Clause"
] | 1 | 2016-09-25T16:41:57.000Z | 2016-09-25T16:41:57.000Z | src/main/tools/dbpy/FotechUtils/dbUtils.py | inqwell/inq | 31ce4cd6b9b123b1ec4462905ccbcf7c00d6efc3 | [
"BSD-3-Clause"
] | null | null | null | src/main/tools/dbpy/FotechUtils/dbUtils.py | inqwell/inq | 31ce4cd6b9b123b1ec4462905ccbcf7c00d6efc3 | [
"BSD-3-Clause"
] | 2 | 2016-09-25T16:48:49.000Z | 2020-05-26T20:00:33.000Z | #
# $Header: /home/inqwell/cvsroot/dev/scripts/python/FotechUtils/dbUtils.py,v 1.1 2009/05/22 22:16:32 sanderst Exp $
#
import KBC.fotech
from Util import db
from dbConfig import configurationProvider
def getConnection(confile, system, level, access="read", site=None, user=None, pwdfile=None):
    """
    Build a database connection for *system*/*level* from the XML configuration.

    Drop-in replacement for the old db.py helper: reads the connection details
    from *confile* (passwords optionally from *pwdfile*) and forwards them to
    db._getConnection, so callers receive the same connection object as before.
    """
    config = configurationProvider(confile, pwdfile)
    details = config.getConnectionDetails(system, level, access, site, user)
    vendor, server, user, password, schema, host, port = details
    return db._getConnection(vendor.upper(), server, schema, user, password)
| 45.857143 | 129 | 0.726895 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 507 | 0.52648 |
4b1ec74bf2a93ae529c6f9b679c345029b8413cf | 1,517 | py | Python | randgenuntil.py | i-can-not-program/randgenuntil | cec853bc0c0a6589d60e1c6e3064e273e6278e0f | [
"Unlicense"
] | 1 | 2021-09-09T12:03:57.000Z | 2021-09-09T12:03:57.000Z | randgenuntil.py | i-can-not-program/randgenuntil | cec853bc0c0a6589d60e1c6e3064e273e6278e0f | [
"Unlicense"
] | null | null | null | randgenuntil.py | i-can-not-program/randgenuntil | cec853bc0c0a6589d60e1c6e3064e273e6278e0f | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
import random
import argparse
import sys
def error(message):
    """Print *message* to stderr and terminate with exit status 1.

    Used below for fatal argument-validation failures.
    """
    # Diagnostics belong on stderr so they do not mix with the generated
    # numbers written to stdout.
    print(message, file=sys.stderr)
    sys.exit(1)
# Build the CLI: a required target number plus optional range/reporting flags.
parser = argparse.ArgumentParser()
parser.add_argument("number",
                    help="Generate a random numbers until they are equal to this.", type=int)
parser.add_argument("-s", "--start", type=int, default=0,
                    help="The range in which the random numbers are in starts with this number. (default 0)")
parser.add_argument("-e", "--end", type=int, default=32767,
                    help="The range in which the random numbers are in ends with this number. (default 32767)")
parser.add_argument("-c", "--count",
                    help="Counts the amount of tries it takes to get to the number.", action="store_true")
parser.add_argument("-n", "--newline",
                    help="Adds a newline between random numbers.", action="store_true")
args = parser.parse_args()

# The target must lie inside the requested [start, end] range.
if args.start > args.end:
    error("error: start is greater than end")
if args.number > args.end or args.number < args.start:
    error("error: number is either greater than end or less than start")

# With -n each draw gets its own line; otherwise "\r" overwrites the previous draw.
end = "\n" if args.newline else "\r"
rand_num = ''
tries = 0
args.end += 1  # randrange excludes the stop value, so widen it to include end
while rand_num != args.number:
    # Pad to the previous draw's width so a shorter number fully overwrites it.
    width = len(str(rand_num))
    rand_num = random.randrange(args.start, args.end)
    print("{rand_num: <{width}}".format(rand_num=rand_num, width=width), end=end)
    tries += 1
if args.count:
    print("{} tries to get to {}".format(tries, args.number))
elif end == "\r":
    # Leave the cursor on a fresh line after the in-place "\r" updates.
    print()
4b1fa47c925f46978fe64a19c7b80b111b447a75 | 2,798 | py | Python | gopredict/modelo.py | ajalba/gopredict | bfcb1c4c10b6787da10c7515ae2adf65252bb8c6 | [
"MIT"
] | null | null | null | gopredict/modelo.py | ajalba/gopredict | bfcb1c4c10b6787da10c7515ae2adf65252bb8c6 | [
"MIT"
] | 39 | 2021-10-31T16:51:39.000Z | 2021-11-22T09:56:04.000Z | gopredict/modelo.py | ajalba/gopredict | bfcb1c4c10b6787da10c7515ae2adf65252bb8c6 | [
"MIT"
] | null | null | null | """
Clase para representar a los diferentes modelos y su comportamiento
atributos(de momento)
df=dataframe de entrenamiento proviniente del conjunto de datos de entrenamiento del usuario
x_train,x_test,y_train,y_test, particiones de df para entrenar el modelo
El resto de métodos son autoexplicativos
"""
from numpy import array
from pandas.core.frame import DataFrame
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn import metrics
class Modelo:
    """Logistic-regression workflow over a user-supplied training DataFrame.

    Holds the raw data, the train/test partitions, the fitted model and the
    latest predictions, and exposes helpers that compute evaluation metrics
    (accuracy/precision/recall/F1, confusion matrix, ROC curve).
    """

    def __init__(self, data):
        """Store the training DataFrame and create an unfitted model."""
        self.df = data
        self.X_train = None
        self.X_test = None
        self.y_train = None
        self.y_test = None
        self.y_pred = None
        self.modelo = LogisticRegression()

    def realizar_particion(self, cols_atributos: array):
        """Return a deep copy of the data restricted to *cols_atributos*."""
        aux = self.df.copy(deep=True)
        return aux[cols_atributos]

    def particion_train_test(self, X: DataFrame, y: DataFrame, test_porcentaje: int):
        """Split X/y into train and test partitions; return True on success."""
        try:
            self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(
                X, y, test_size=test_porcentaje, random_state=0)
            return True
        except Exception:  # narrowed from a bare except; boolean contract kept
            return False

    def entrenar(self):
        """Fit the model on the training partition; return True on success."""
        try:
            self.modelo.fit(self.X_train, self.y_train)
            return True
        except Exception as e:
            print(e)
            return False

    def predecir_entrenamiento(self):
        """Predict on the held-out test partition; return True on success."""
        try:
            self.y_pred = self.modelo.predict(self.X_test)
            return True
        except Exception:  # narrowed from a bare except; boolean contract kept
            return False

    def get_metricas_rendimiento(self):
        """Return [accuracy, precision, recall, f1] for the last prediction."""
        accuracy = metrics.accuracy_score(self.y_test, self.y_pred)
        precision = metrics.precision_score(self.y_test, self.y_pred, zero_division=0)
        recall = metrics.recall_score(self.y_test, self.y_pred)
        f1 = metrics.f1_score(self.y_test, self.y_pred)
        return [accuracy, precision, recall, f1]

    def get_metricas_matriz_confusion(self):
        """Return the confusion matrix for the last prediction."""
        return metrics.confusion_matrix(self.y_test, self.y_pred)

    def get_metricas_roc(self):
        """Return a DataFrame with the ROC curve (TPR/FPR columns)."""
        y_pred_proba = self.modelo.predict_proba(self.X_test)[::, 1]
        # Fix: roc_curve was previously computed twice with identical inputs;
        # the duplicate call has been removed.
        fpr, tpr, _ = metrics.roc_curve(self.y_test, y_pred_proba)
        roc_data = pd.DataFrame([])
        roc_data['True Positive'] = tpr
        roc_data['False Positive'] = fpr
        return roc_data
| 36.337662 | 92 | 0.686919 | 2,272 | 0.811139 | 0 | 0 | 0 | 0 | 0 | 0 | 678 | 0.242056 |
4b21181e3a74fe5a1a2c9a5d58470698abe2c63a | 10,698 | py | Python | python/cvi_toolkit/numpy_helper/tensor_compare.py | sophgo/tpu_compiler | 6299ea0a3adae1e5c206bcb9bedf225d16e636db | [
"Apache-2.0"
] | 3 | 2022-03-14T11:47:20.000Z | 2022-03-16T01:45:37.000Z | python/cvi_toolkit/numpy_helper/tensor_compare.py | sophgo/tpu_compiler | 6299ea0a3adae1e5c206bcb9bedf225d16e636db | [
"Apache-2.0"
] | null | null | null | python/cvi_toolkit/numpy_helper/tensor_compare.py | sophgo/tpu_compiler | 6299ea0a3adae1e5c206bcb9bedf225d16e636db | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import numpy as np
import sys
import struct
# from math import fabs
from enum import IntEnum
from scipy import spatial
from math import *
from collections import OrderedDict
def second(elem):
    """Sort key helper: the second item of *elem*."""
    key = elem[1]
    return key
def get_topk(a, k):
    """Return the k largest elements of *a* as (flat_index, value) pairs.

    The result is sorted by value in descending order; k is clamped to the
    number of elements in *a*.
    """
    flat = a.ravel()
    k = min(a.size, k)
    # argpartition gives the k largest in O(n); full ordering is restored below.
    order = np.argpartition(-flat, k - 1)[:k]
    pairs = list(zip(order, flat[order]))
    pairs.sort(key=lambda pair: pair[1], reverse=True)
    return pairs
class TensorCompare():
    """Compares two tensors and classifies how well they agree.

    Verdicts, strongest first: EQUAL (bitwise identical), CLOSE (allclose
    within ``close_order_tol`` decimal orders), SIMILAR (all four similarity
    metrics above their thresholds); otherwise NOT_EQUAL / NOT_SIMILAR.
    NOT_MATCH means the two tensors have different sizes.
    """
    NOT_MATCH = "NOT_MATCH"
    EQUAL = "EQUAL"
    NOT_EQUAL = "NOT_EQUAL"
    CLOSE = "CLOSE"
    SIMILAR = "SIMILAR"
    # NOTE(review): the string value is misspelled ("NOT_SIMLIAR"); kept
    # as-is because external logs/CSVs may already contain this spelling.
    NOT_SIMILAR = "NOT_SIMLIAR"

    def __init__(self, close_order_tol=3,
                 cosine_similarity_tol = 0.99,
                 correlation_similarity_tol = 0.99,
                 euclidean_similarity_tol = 0.90,
                 signal_to_quantization_noise_tol = 50):
        """Store the thresholds used by compare() for CLOSE/SIMILAR verdicts."""
        self.close_order_tol = close_order_tol
        self.cosine_similarity_tol = cosine_similarity_tol
        self.correlation_similarity_tol = correlation_similarity_tol
        self.euclidean_similarity_tol = euclidean_similarity_tol
        self.signal_to_quantization_noise_tol = signal_to_quantization_noise_tol
        return

    def square_rooted(self, x):
        """Return the L2 norm (sqrt of the sum of squares) of iterable *x*."""
        return sqrt(sum([a*a for a in x]))

    def cosine_similarity(self, x, y):
        """Return the cosine similarity of vectors *x* and *y*, rounded to 3 dp."""
        numerator = sum(a*b for a,b in zip(x,y))
        denominator = self.square_rooted(x)*self.square_rooted(y)
        return round(numerator/float(denominator),3)

    def euclidean_distance(self, x, y):
        """Return the Euclidean distance between vectors *x* and *y*."""
        return sqrt(sum(pow(a-b,2) for a, b in zip(x, y)))

    def sqnr_similarity(self, signal_raw, signal_dequant, remove_zero=True):
        """Return the signal-to-quantization-noise ratio in dB.

        *signal_raw* is the reference and *signal_dequant* the approximation;
        with remove_zero=True, positions where the reference is 0 are skipped.
        Returns inf when either variance term is zero.
        """
        # SQNR is non-commutative
        # Unlike other distance function
        # Cannot change the order of signal_raw and signal_dequant
        raw = signal_raw.flatten()
        dequant = signal_dequant.flatten()

        if remove_zero is True:
            idx = raw != 0
            raw = raw[idx]
            dequant = dequant[idx]

        noise = raw - dequant

        avg_raw = np.sum(raw) / raw.size
        avg_noise = np.sum(noise) / noise.size

        raw_zero_mean = raw - avg_raw
        noise_zero_mean = noise - avg_noise

        var_raw_zero_mean = np.sum(np.square(raw_zero_mean))
        var_noise_zero_mean = np.sum(np.square(noise_zero_mean))
        if var_noise_zero_mean == 0 or var_raw_zero_mean == 0:
            return float('inf')

        sqnr = 10 * np.log10(var_raw_zero_mean / var_noise_zero_mean)

        return sqnr

    def all_diffs(self, d1, d2):
        """Return (flat_index, value1, value2) for every differing position.

        int8 tensors are compared exactly; other dtypes use the same
        atol/rtol tolerance as the CLOSE check (fabs comes from the
        module's ``from math import *``).
        """
        diffs = list()
        d1f = d1.flatten()
        d2f = d2.flatten()
        if d1f.dtype == np.int8:
            assert(d2f.dtype == np.int8)
            for i in range(len(d1f)):
                if (d1f[i] != d2f[i]):
                    diffs.append((i, d1f[i], d2f[i]))
        else:
            atol = 10**(-self.close_order_tol)
            rtol = 10**(-self.close_order_tol)
            for i in range(len(d1f)):
                if fabs(d1f[i] - d2f[i]) > (atol + rtol * fabs(d2f[i])):
                    diffs.append((i, d1f[i], d2f[i]))
        return diffs

    def diff_details(self, d1, d2, verbose):
        """Collect diff info keyed by verbosity: top-k (>1), diffs (>2), all (>3)."""
        details = {}
        if verbose > 1:
            K = 10
            tk1 = get_topk(d1, K)
            tk2 = get_topk(d2, K)
            details['top-k'] = (tk1, tk2)
        if verbose > 2:
            details['diffs'] = self.all_diffs(d1,d2)
        if verbose > 3:
            details['all'] = (d1, d2)
        return details

    def compare(self, d1, d2, verbose, int8_tensor_close=True):
        """Compare tensors *d1* and *d2*.

        Returns a 4-tuple (passed, verdict, similarities, details) where
        *similarities* maps metric names to values and *details* (from
        diff_details) is only filled for failing comparisons.
        """
        similarities = {}
        if d1.size != d2.size:
            return (False, self.NOT_MATCH, similarities, None)

        if np.array_equal(d1, d2):
            return (True, self.EQUAL, similarities, None)
        # int8 only check equal, not close
        if d1.dtype == np.int8 and int8_tensor_close:
            details = self.diff_details(d1, d2, verbose)
            return (False, self.NOT_EQUAL, similarities, details)

        # check allclose
        # Try decreasing decimal orders; 'order' keeps the loop's last value,
        # so only values >= close_order_tol count as a CLOSE verdict.
        for order in range((self.close_order_tol + 2), 1, -1):
            if (np.allclose(d1, d2, rtol=1 * 10**(-order), atol=1e-8, equal_nan=True)):
                break
        if order >= self.close_order_tol:
            similarities["close_order"] = order
            return (True, self.CLOSE, similarities, None)

        # check similarity
        # cosine similarity
        # cosine_similarity_my = self.cosine_similarity(d1.flatten(), d2.flatten())
        cosine_similarity = 1 - spatial.distance.cosine(d1.flatten().astype(np.float32),
                                                        d2.flatten().astype(np.float32))
        # correlation similarity
        #1 - spatial.distance.correlation(d1.flatten(), d2.flatten())
        correlation_similarity = cosine_similarity
        # measure euclidean similarity
        m = (d1+d2)/2
        ed = self.euclidean_distance(d1.flatten(), d2.flatten())
        sr = self.square_rooted(m.flatten())
        euclidean_similarity = 1 - ed / sr

        sqnr = self.sqnr_similarity(d1, d2)

        similarities["cosine"] = cosine_similarity
        similarities["correlation"] = correlation_similarity
        similarities["euclid"] = euclidean_similarity
        similarities["sqnr"] = sqnr
        # check similarity
        if (cosine_similarity > self.cosine_similarity_tol
            and correlation_similarity > self.correlation_similarity_tol
            and euclidean_similarity > self.euclidean_similarity_tol
            and sqnr > self.signal_to_quantization_noise_tol):
            return (True, self.SIMILAR, similarities, None)
        else:
            # Not similar
            details = self.diff_details(d1, d2, verbose)
            return (False, self.NOT_SIMILAR, similarities, details)

    def int8_tensor_stats(self, d):
        """Print saturation/zero statistics for an int8 view of *d*."""
        d_int8 = d.astype(np.int8)
        pos = np.sum(d_int8 == 127)
        neg = np.sum(d_int8 == -128)
        zeros = np.sum(d_int8 == 0)
        b_low = np.sum(np.abs(d_int8) <= 8) # 16, 32, 63
        tol = d_int8.size
        print(" pos(x=127) = {:.4f} [{}/{}]".format(pos / tol, pos, tol))
        print(" neg(x=-128) = {:.4f} [{}/{}]".format(neg / tol, neg, tol))
        print(" zeros(x=0) = {:.4f} [{}/{}]".format(zeros / tol, zeros, tol))
        print(" low(abs(x)<8) = {:.4f} [{}/{}]".format(b_low / tol, b_low, tol))

    def print_result(self, d1, name, result, verbose):
        """Pretty-print a compare() *result* for tensor *name*.

        Verbosity levels add, in order: shape/metrics, top-k values,
        the per-element diff list, and finally every element.
        """
        print("[{:<32}] {:>12} [{:>6}]".format(name, result[1],
                                               "PASSED" if result[0] else "FAILED"))
        if (verbose > 0):
            print(" {} {} ".format(d1.shape, d1.dtype))
            if (result[1] == self.CLOSE):
                print(" close order = {}".format(result[2]["close_order"]))
            if (result[1] == self.SIMILAR or result[1] == self.NOT_SIMILAR):
                print(" cosine_similarity = {:.6f}".format(result[2]["cosine"]))
                print(" correlation_similarity = {:.6f}".format(result[2]["correlation"]))
                print(" euclidean_similarity = {:.6f}".format(result[2]["euclid"]))
                print(" sqnr_similarity = {:.6f}".format(result[2]["sqnr"]))
                if d1.dtype == np.int8:
                    self.int8_tensor_stats(d1)

        details = result[-1]
        if not details:
            return
        if (verbose > 1 and not result[0]):
            print('top-k:')
            print(' idx-t target idx-r ref')
            tk1, tk2 = details['top-k']
            for i in range(len(tk1)):
                idx_t, target = tk1[i]
                idx_r, ref = tk2[i]
                print(" ", idx_t, target, idx_r, ref)
        if (verbose > 2 and not result[0] and details['diffs'] is not None):
            print("all-diffs:")
            print(" idx target ref")
            for i in details['diffs']:
                print(" ", *i)
        if (verbose > 3 and not result[0]):
            print("all-elements:")
            print(" idx target ref")
            target, ref = details['all']
            for index, val in np.ndenumerate(target):
                print(" ", index, val, ref[index])
class TensorCompareStats():
    """Aggregates TensorCompare results across many named tensors.

    Tracks pass/fail totals, a per-verdict histogram and the minimum of each
    similarity metric, and can print a summary or dump everything to CSV.
    """
    def __init__(self):
        self.passed = 0
        self.failed = 0
        # name -> compare() result, in insertion order (for CSV export).
        self.results = OrderedDict()
        self.count = {}
        self.count[TensorCompare.NOT_MATCH] = 0
        self.count[TensorCompare.EQUAL] = 0
        self.count[TensorCompare.NOT_EQUAL] = 0
        self.count[TensorCompare.CLOSE] = 0
        self.count[TensorCompare.SIMILAR] = 0
        self.count[TensorCompare.NOT_SIMILAR] = 0
        # Worst-case similarities seen so far (start at the best possible).
        self.min_cosine_similarity = 1.0
        self.min_correlation_similarity = 1.0
        self.min_euclidean_similarity = 1.0
        self.min_sqnr = float('inf')

    def update(self, name, result):
        """Record the compare() *result* for tensor *name* and update tallies."""
        self.results[name] = result
        if result[0]:
            self.passed = self.passed + 1
            assert (result[1] == TensorCompare.EQUAL
                    or result[1] == TensorCompare.CLOSE
                    or result[1] == TensorCompare.SIMILAR)
        else:
            self.failed = self.failed + 1
            assert (result[1] == TensorCompare.NOT_EQUAL
                    or result[1] == TensorCompare.NOT_SIMILAR)
        self.count[result[1]] = self.count[result[1]] + 1
        # record min similarity
        if result[1] == TensorCompare.SIMILAR or result[1] == TensorCompare.NOT_SIMILAR:
            self.min_cosine_similarity = min(self.min_cosine_similarity, result[2]["cosine"])
            self.min_correlation_similarity = min(self.min_correlation_similarity, result[2]["correlation"])
            self.min_euclidean_similarity = min(self.min_euclidean_similarity, result[2]["euclid"])
            self.min_sqnr = min(self.min_sqnr, result[2]["sqnr"])

    def print_result(self):
        """Print a human-readable summary of all recorded comparisons."""
        print("%d compared"%(len(self.results)))
        print("%d passed"%(self.passed))
        print(" %d equal, %d close, %d similar"
              %(self.count[TensorCompare.EQUAL],
                self.count[TensorCompare.CLOSE],
                self.count[TensorCompare.SIMILAR]))
        print("%d failed"%(self.failed))
        print(" %d not equal, %d not similar"
              %(self.count[TensorCompare.NOT_EQUAL],
                self.count[TensorCompare.NOT_SIMILAR]))
        print("min_similiarity = ({}, {}, {}, {})".format(
            self.min_cosine_similarity,
            self.min_correlation_similarity,
            self.min_euclidean_similarity,
            self.min_sqnr))

    def save_result(self, csv_file, operations, quant_types):
        """Write one CSV row per tensor: name, op, quant type, pass flag, metrics.

        *operations* and *quant_types* map tensor names to labels; missing
        names fall back to '-'. Tensors without similarity metrics get the
        neutral values 1.0 / 1.0 / -inf.
        """
        has_similarity = lambda x: (x == TensorCompare.SIMILAR
                                    or x == TensorCompare.NOT_SIMILAR)
        with open(csv_file, mode='w') as f:
            f.write("name, op, quant, pass, sim_cos, sim_euc, sqnr\n")
            for name, result in self.results.items():
                op = operations.get(name, '-')
                qtype = quant_types.get(name, '-')
                is_equal = bool(result[1] == TensorCompare.EQUAL)
                is_close = bool(result[1] == TensorCompare.CLOSE)
                is_similar = bool(result[1] == TensorCompare.SIMILAR)
                is_pass = bool(is_similar or is_close or is_equal)
                cos = float(result[2]["cosine"]) if has_similarity(result[1]) else 1.0
                euc = float(result[2]["euclid"]) if has_similarity(result[1]) else 1.0
                sqnr = float(result[2]["sqnr"]) if has_similarity(result[1]) else float('-inf')
                f.write("{}, {}, {}, {}, {}, {}, {}\n".format(
                    name, op, qtype, is_pass, cos, euc, sqnr))
4b239e38be546f3cf138ae920d55ac95f83dd8aa | 4,508 | py | Python | src/cleaners.py | RellikDog/dota2-win-predictor-v2 | 2751093d988830296ab0408a820e52bd7fd963b0 | [
"MIT"
] | 1 | 2020-06-12T00:27:45.000Z | 2020-06-12T00:27:45.000Z | src/cleaners.py | RellikDog/dota2-win-predictor-v2 | 2751093d988830296ab0408a820e52bd7fd963b0 | [
"MIT"
] | null | null | null | src/cleaners.py | RellikDog/dota2-win-predictor-v2 | 2751093d988830296ab0408a820e52bd7fd963b0 | [
"MIT"
] | null | null | null | from src.eda import make_counter
import pandas as pd
import numpy as np
from src.heroes import heroes, name_id, id_name
def id_list_from_history(data):
    '''
    Extract just the match ids from raw api_calls.get_match_history() data.

    Input:
        data(list): list of match objects (dicts with a 'match_id' field)
    Output:
        List of integers, one unique match id per match object.
    '''
    ids = []
    for match in data:
        ids.append(int(match['match_id']))
    return ids
def clean_match_details(match):
    '''
    Reduce raw api_calls.get_match_details() data to the fields we keep.

    Input:
        match(dict): API response whose payload lives under the 'result' key.
    Output:
        dict with:
            _id(int): match id
            radiant_win(int): 1 if radiant won, 0 otherwise
            match_date(timestamp): when the match started
            radiant_hero_ids / dire_hero_ids(list of ints): hero ids per team
    '''
    data = match['result']
    players = data['players']
    # Player slots below 128 belong to the radiant team, the rest to dire.
    return {
        '_id': data['match_id'],
        'radiant_win': int(data['radiant_win']),
        'match_date': data['start_time'],
        'radiant_hero_ids': [p['hero_id'] for p in players if p['player_slot'] < 128],
        'dire_hero_ids': [p['hero_id'] for p in players if p['player_slot'] >= 128],
    }
def make_csv(counter, counter_data):
    '''
    Dump all match data as a one-hot encoded CSV (written to test.csv).

    Input:
        counter(Counter/dict): aggregate counter over the whole DB; its keys
            (minus 'count') provide the set of hero-id columns
        counter_data(mongo cursor list): return of .find() on the raw collection
    Output:
        None. Side effects: removes the 'count' key from *counter* and writes
        test.csv in the working directory.
    '''
    # Drop the aggregate entry so only hero ids remain as keys.
    del counter['count']
    hero_ids = sorted(counter.keys())

    # One 'R' (radiant) and one 'D' (dire) indicator column per hero id,
    # after the three fixed metadata columns.
    columns = ['match_id', 'match_date', 'radiant_win']
    for hid in hero_ids:
        columns.append('{}R'.format(hid))
        columns.append('{}D'.format(hid))

    rows_list = []
    for match in counter_data:
        row = dict.fromkeys(columns, 0)
        row['match_id'] = match['_id']
        row['match_date'] = match['match_date']
        row['radiant_win'] = match['radiant_win']
        # Both hero lists have one entry per player pair; flag each pick.
        for rad_id, dire_id in zip(match['radiant_hero_ids'], match['dire_hero_ids']):
            row['{}R'.format(rad_id)] = 1
            row['{}D'.format(dire_id)] = 1
        rows_list.append(row)

    pd.DataFrame(rows_list).to_csv('test.csv')
def make_pred_row(df, rad, dire):
    '''
    Build a single-row frame encoding one draft, ready for prediction.

    Input:
        df(dataframe): frame read from test.csv; only its columns are used
        rad(list): radiant hero names received from the front end
        dire(list): dire hero names received from the front end
    Output:
        pred_row(pandas dataframe): all-zero row with a 1.0 in the column of
        each picked hero ('<id>R' for radiant, '<id>D' for dire)
    '''
    # Strip the bookkeeping columns so only hero indicator columns remain.
    for col in ('Unnamed: 0', 'match_id', 'match_date', 'Unnamed: 1', 'radiant_win'):
        if col in df.columns:
            df.pop(col)
    # Start from an all-zero row sharing the remaining columns.
    pred_row = pd.DataFrame([np.zeros(len(df.columns))], columns=df.columns)
    # name_id maps a hero name to its numeric id; flag each team's pick.
    for indx, hero in enumerate(rad):
        pred_row[str(name_id(hero)) + 'R'] = 1.0
        pred_row[str(name_id(dire[indx])) + 'D'] = 1.0
    return pred_row
4b26f2f9d05f6e347a28ccd82f8bc4ee81785946 | 808 | py | Python | essEcommerce/views.py | AymanTareq/cit_ecommerce | 7a000f9f9ed76af99ec3c5a5faa1dbde8b988370 | [
"CC0-1.0"
] | null | null | null | essEcommerce/views.py | AymanTareq/cit_ecommerce | 7a000f9f9ed76af99ec3c5a5faa1dbde8b988370 | [
"CC0-1.0"
] | null | null | null | essEcommerce/views.py | AymanTareq/cit_ecommerce | 7a000f9f9ed76af99ec3c5a5faa1dbde8b988370 | [
"CC0-1.0"
] | null | null | null | from django.shortcuts import render
from .models import *
def all_product(request):
    """Render the page listing every product."""
    return render(
        request,
        'essEcommerce/all_product.html',
        {'products': Product.objects.all()},
    )
def cart(request):
    """Render the cart page.

    Authenticated users get (or lazily create) their open order and its
    items; anonymous visitors get an empty cart with zeroed totals.
    """
    user = request.user
    if user.is_authenticated:
        order, _created = Order.objects.get_or_create(customer=user.customer, status=False)
        items = order.orderitem_set.all()
    else:
        items = []
        # Dict mimics the order's template interface with zero totals.
        order = {'get_cart_total': 0, 'get_cart_total_price': 0}
    return render(request, 'essEcommerce/cart.html', {'order': order, 'items': items})
def check_out(request):
    """Render the checkout page (no extra context passed)."""
    return render(request, 'essEcommerce/checkout.html')
| 26.064516 | 85 | 0.634901 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 145 | 0.179455 |
4b27ad25bbde1311e3d80132c3a579efdb94319b | 431 | py | Python | HowloserUare/api/serializers.py | HowloserUare/HowloserUare-api | c5f8a111f1c4bacba8d0932d8da7ad72dd3ce5c0 | [
"MIT"
] | null | null | null | HowloserUare/api/serializers.py | HowloserUare/HowloserUare-api | c5f8a111f1c4bacba8d0932d8da7ad72dd3ce5c0 | [
"MIT"
] | null | null | null | HowloserUare/api/serializers.py | HowloserUare/HowloserUare-api | c5f8a111f1c4bacba8d0932d8da7ad72dd3ce5c0 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from core.models import User
class UserSerializer(serializers.Serializer):
    """Validates a username/password payload for the core User model."""
    # Username: 5-16 chars, must not collide with an existing User row.
    username = serializers.CharField(
        max_length=16,
        min_length=5,
        validators=[UniqueValidator(User.objects.all()), ])
    # Password: 6-64 chars; no uniqueness constraint.
    password = serializers.CharField(
        max_length=64, min_length=6)

    class Meta:
        # NOTE(review): Meta.exclude is only honoured by ModelSerializer; on a
        # plain Serializer this attribute has no effect — confirm whether a
        # ModelSerializer base class was intended.
        exclude = ('id',)
| 26.9375 | 59 | 0.712297 | 306 | 0.709977 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.009281 |
4b2885e057467c9b44c65e10af712efbc6a9cb24 | 74 | py | Python | mogpe/__init__.py | aidanscannell/mogpe | 25a9af473d73d6fa35bd060bee0eb2c372b995e5 | [
"Apache-2.0"
] | 11 | 2021-04-01T02:40:21.000Z | 2022-01-31T16:14:44.000Z | mogpe/__init__.py | aidanscannell/mogpe | 25a9af473d73d6fa35bd060bee0eb2c372b995e5 | [
"Apache-2.0"
] | null | null | null | mogpe/__init__.py | aidanscannell/mogpe | 25a9af473d73d6fa35bd060bee0eb2c372b995e5 | [
"Apache-2.0"
] | 3 | 2021-04-04T02:45:34.000Z | 2021-11-22T23:48:28.000Z | from . import experts, gating_networks, gps, mixture_of_experts, training
| 37 | 73 | 0.824324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4b29c7bf6ebe9ba8c28605961385ea51d3e8eaf4 | 49 | py | Python | pynfldata/coaches_data/__init__.py | trevorbalint/pynfldata | d27c550a5249c30cd2f32a8ad455c9c4ff623f5b | [
"MIT"
] | 3 | 2020-09-24T17:17:28.000Z | 2021-09-23T18:39:09.000Z | pynfldata/coaches_data/__init__.py | trevorbalint/pynfldata | d27c550a5249c30cd2f32a8ad455c9c4ff623f5b | [
"MIT"
] | 5 | 2019-10-29T00:31:22.000Z | 2020-01-12T12:35:52.000Z | pynfldata/coaches_data/__init__.py | tabalint/pynfldata | d27c550a5249c30cd2f32a8ad455c9c4ff623f5b | [
"MIT"
] | null | null | null | from pynfldata.coaches_data import coaches_parser | 49 | 49 | 0.918367 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4b2a94692c84f7c38268202ef3957322166618de | 991 | py | Python | tensorflow_v2/dragen1860/ch10/bn_main.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 13 | 2020-01-04T07:37:38.000Z | 2021-08-31T05:19:58.000Z | tensorflow_v2/dragen1860/ch10/bn_main.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 3 | 2020-06-05T22:42:53.000Z | 2020-08-24T07:18:54.000Z | tensorflow_v2/dragen1860/ch10/bn_main.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 9 | 2020-10-19T04:53:06.000Z | 2021-08-31T05:20:01.000Z | import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, optimizers
# BatchNorm: normalize the network activations.  (translated from Chinese)
# 2 images with 4x4 size, 3 channels
# we explicitly enforce the mean and stddev to N(1, 0.5)
x = tf.random.normal([2, 4, 4, 3], mean=1.0, stddev=0.5)

# Normalize over the channel axis; scale (gamma) and center (beta) are trainable.
net = layers.BatchNormalization(axis=-1, center=True, scale=True, trainable=True)
# layers.LayerNormalization
out = net(x)
# Inference mode (training defaults to False): the layer's variables are printed
# before and after to show what each forward pass changes.
print("forward in test mode:", net.variables)

out = net(x, training=True)
print("forward in train mode(1 step):", net.variables)

# Repeated training-mode passes over the same batch.
for i in range(100):
    out = net(x, training=True)
print("forward in train mode(100 steps):", net.variables)

optimizer = optimizers.SGD(lr=1e-2)
for i in range(10):
    with tf.GradientTape() as tape:
        out = net(x, training=True)
        loss = tf.reduce_mean(tf.pow(out, 2)) - 1
    # Only trainable variables (gamma/beta) receive gradient updates.
    grads = tape.gradient(loss, net.trainable_variables)
    optimizer.apply_gradients(zip(grads, net.trainable_variables))
print("backward(10 steps):", net.variables)
| 30.030303 | 81 | 0.71443 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 269 | 0.266601 |
4b2b18a133cf43bd4b7425912dff7947dc039608 | 1,001 | py | Python | pytocl/main.py | moltob/pytocl | 905c09e649feca8feeaef6fdecd6767d82cdb28e | [
"MIT"
] | 12 | 2016-11-02T08:43:04.000Z | 2020-05-17T11:23:32.000Z | pytocl/main.py | moltob/pytocl | 905c09e649feca8feeaef6fdecd6767d82cdb28e | [
"MIT"
] | 1 | 2020-06-08T09:48:20.000Z | 2020-06-08T09:48:20.000Z | pytocl/main.py | moltob/pytocl | 905c09e649feca8feeaef6fdecd6767d82cdb28e | [
"MIT"
] | 3 | 2017-08-01T18:30:32.000Z | 2018-08-04T13:10:15.000Z | """Application entry point."""
import argparse
import logging
from pytocl.protocol import Client
def main():
    """Parse command-line options, configure logging, and run the SCRC client."""
    parser = argparse.ArgumentParser(
        description='Client for TORCS racing car simulation with SCRC network server.')
    parser.add_argument('--hostname', default='localhost',
                        help='Racing server host name.')
    parser.add_argument('--port', type=int, default=3001,
                        help='Port to connect, 3001 - 3010 for clients 1 - 10.')
    parser.add_argument('-v', action='store_true', help='Debug log level.')
    args = parser.parse_args()

    # -v selects DEBUG; drop it from the namespace afterwards so that only
    # Client's keyword arguments (hostname, port) remain.
    level = logging.DEBUG if args.v else logging.INFO
    del args.v

    logging.basicConfig(level=level,
                        format="%(asctime)s %(levelname)7s %(name)s %(message)s")

    # Hand the remaining options to the client and enter its loop.
    client = Client(**vars(args))
    client.run()


if __name__ == '__main__':
    main()
| 30.333333 | 100 | 0.622378 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 375 | 0.374625 |
4b2b25ace759328c89bdd5c3d6fc4d697b6531e4 | 2,381 | py | Python | JSMultiline.py | axilleasiv/JSMultiline | 59779d4b6c444461597b105e31aa0efb0e86805c | [
"MIT"
] | 6 | 2015-05-04T00:05:00.000Z | 2016-12-09T14:40:47.000Z | JSMultiline.py | axilleasiv/JSMultiline | 59779d4b6c444461597b105e31aa0efb0e86805c | [
"MIT"
] | 1 | 2018-06-25T17:13:37.000Z | 2018-06-25T17:13:37.000Z | JSMultiline.py | axilleasiv/JSMultiline | 59779d4b6c444461597b105e31aa0efb0e86805c | [
"MIT"
] | null | null | null | import sublime
import sublime_plugin
import re
import os
rexLastTabs = re.compile(r'(\t+|\s+)$', re.MULTILINE)
rexEmptyLines = re.compile('^[ \t]*$\r?\n', re.MULTILINE)
rexCont = re.compile(r'[^\t\s].*[^\t\s]')
rexFormatted = re.compile(r"((?<=\s)'|(?<=\t)')|('*\s[\+|\\|])")
class RunMultilineAction(sublime_plugin.TextCommand):
    """Sublime Text command that wraps each selected region as a JavaScript
    multiline string — '+'-concatenated (action='plus') or backslash-continued
    (action='slash') — and unwraps it again if it is already formatted."""

    def run(self, edit, action=None):
        # Guard: only operate on JS/JSON buffers.
        if not is_js_buffer(self.view):
            sublime.status_message('Multiline: Not supported format.')
            return False
        for region in self.view.sel():
            if region.empty():
                continue
            text = self.view.substr(region)
            formatted = self.checkFormat(text)
            if formatted:
                # Selection already carries multiline formatting: toggle it off.
                replacement = formatted
            else:
                # Normalize single to double quotes (the wrapper uses single
                # quotes itself), drop blank lines, then wrap.
                text = re.sub(r"'", '"', text)
                replacement = self.format( rexEmptyLines.sub('', text), action )
            self.view.replace(edit, region, replacement)
        sublime.status_message('Multiline: Formatting is done.')

    def checkFormat(self, text):
        """Return the unwrapped text if *text* is already formatted, else False."""
        formatted = False
        # only one line formatted
        # A single formatted line ends with "';" (plus style) or "\" (slash
        # style); slicing [1:-2] drops the opening quote and the tail.
        if text.find('\n') == -1 and (text.endswith("';") or text.endswith("\\")):
            return text[1: len(text) -2]
        if rexFormatted.search( text ):
            # Strip the per-line quote/continuation tokens, then the leading
            # quote and trailing quote+semicolon of the whole block.
            # NOTE(review): assumes the selection exactly matches output
            # previously produced by format(); partial selections may slice
            # off real characters — confirm.
            formatted = rexFormatted.sub('', text)
            formatted =formatted[1: len(formatted) -2]
        return formatted

    def format(self, text, action=None):
        """Wrap each line of *text* as one JS string continuation line.

        action == 'plus' quotes every line and joins with '+'; any other
        action produces backslash-continued lines where only the first line
        opens the quote and the last line closes it.
        """
        lines = text.split('\n')
        # and/or idiom: 'plus' -> ('+', "'"); otherwise -> ('\\', '').
        symbol = action == 'plus' and '+' or r'\\'
        quote = action == 'plus' and "'" or ""
        for index in range(len(lines)):
            # Trim trailing whitespace so the continuation token sits flush.
            lines[index] = rexLastTabs.sub('', lines[index])
            if index == len(lines) - 1:
                # Last line: close the string and terminate the statement.
                # NOTE(review): the matched content is fed back to re.sub as a
                # replacement string, so backslashes or group refs inside it
                # would be reinterpreted — confirm selections never contain them.
                lines[index] = rexCont.sub( quote + rexCont.search( lines[index] ).group() + "';", lines[index])
            elif index == 0 and action == 'slash':
                # First line in slash mode opens the single quote.
                lines[index] = rexCont.sub( "'" + rexCont.search( lines[index] ).group() + " " + symbol, lines[index])
            else:
                # Middle lines (and first line in plus mode): quote per style
                # and append the continuation symbol.
                lines[index] = rexCont.sub( quote + rexCont.search( lines[index] ).group() + quote + " " + symbol, lines[index])
        return '\n'.join(lines)
# Adapted from https://github.com/jdc0589/JsFormat (line 47).
def is_js_buffer(view):
    """Return True if *view* holds JavaScript or JSON content.

    Checks both the file extension (for saved files) and the syntax
    definition assigned to the view, so unsaved buffers whose syntax is set
    to JavaScript/JSON still match.

    :param view: Sublime Text view exposing ``file_name()`` and ``settings()``.
    :return: bool
    """
    file_name = view.file_name()
    syntax_path = view.settings().get('syntax')
    syntax = ""
    ext = ""

    if file_name is not None:  # file exists on disk: derive the extension
        ext = os.path.splitext(file_name)[1][1:]

    if syntax_path is not None:
        # e.g. "Packages/JavaScript/JavaScript.tmLanguage" -> "javascript"
        syntax = os.path.splitext(syntax_path)[0].split('/')[-1].lower()

    return ext in ('js', 'json') or "javascript" in syntax or "json" in syntax