max_stars_repo_path
stringlengths 4
277
| max_stars_repo_name
stringlengths 4
130
| max_stars_count
int64 0
191k
| id
stringlengths 1
8
| content
stringlengths 1
996k
| score
float64 -1.25
4.06
| int_score
int64 0
4
|
---|---|---|---|---|---|---|
app/targetbalance.py | woudt/bunq2ifttt | 27 | 12799519 | """
Target balance
Handles the target balance internal/external actions
"""
import json
import uuid
from flask import request
import bunq
import payment
def target_balance_internal():
    """ Execute a target balance internal action

    Reads the IFTTT action payload from the Flask request and moves money
    between two of the user's own accounts ("account" and "other_account")
    so that "account" ends up at the requested target balance.  Returns an
    IFTTT-style JSON body, with HTTP 400 on any error.
    """
    data = request.get_json()
    print("[target_balance_internal] input: {}".format(json.dumps(data)))
    # IFTTT wraps the action parameters in "actionFields"
    if "actionFields" not in data:
        errmsg = "missing actionFields"
        print("[target_balance_internal] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    fields = data["actionFields"]
    # check_fields() validates presence and normalizes the fields in place
    # (strips spaces from IBANs, converts amount to float); returns an
    # error message string or None
    errmsg = check_fields(True, fields)
    if errmsg:
        print("[target_balance_internal] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    # the account NL42BUNQ0123456789 is used for test payments
    if fields["account"] == "NL42BUNQ0123456789":
        return json.dumps({"data": [{"id": uuid.uuid4().hex}]})
    # retrieve balance
    config = bunq.retrieve_config()
    if fields["payment_type"] == "DIRECT":
        # DIRECT needs both balances: the transfer is capped by what the
        # source ("other") account holds
        balance = get_balance(config, fields["account"],
                              fields["other_account"])
        if isinstance(balance, tuple):
            balance, balance2 = balance
            transfer_amount = fields["amount"] - balance
            if transfer_amount > balance2:
                transfer_amount = balance2
    else:
        balance = get_balance(config, fields["account"])
        if isinstance(balance, float):
            transfer_amount = fields["amount"] - balance
    # get_balance() returns an error string when a lookup fails
    # NOTE(review): if a balance were neither float/tuple nor str,
    # transfer_amount would be unbound below — confirm get_balance's
    # return contract (floats assumed).
    if isinstance(balance, str):
        errmsg = balance
        print("[target_balance_internal] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    # construct payment message
    # NOTE(review): this compares the *target* amount against 0.00 although
    # check_fields already guarantees amount > 0; the message suggests
    # transfer_amount was intended — confirm.
    if "{:.2f}".format(fields["amount"]) == "0.00":
        errmsg = "No transfer needed, balance already ok"
        print("[target_balance_internal] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    # positive transfer -> money flows INTO "account" (top up)
    if transfer_amount > 0 and "top up" in fields["direction"]:
        paymentmsg = {
            "amount": {
                "value": "{:.2f}".format(transfer_amount),
                "currency": "EUR"
            },
            "counterparty_alias": {
                "type": "IBAN",
                "value": fields["account"],
                "name": "x"
            },
            "description": fields["description"]
        }
        account = fields["other_account"]
    # negative transfer -> surplus is skimmed off to "other_account"
    elif transfer_amount < 0 and "skim" in fields["direction"]:
        paymentmsg = {
            "amount": {
                "value": "{:.2f}".format(-transfer_amount),
                "currency": "EUR"
            },
            "counterparty_alias": {
                "type": "IBAN",
                "value": fields["other_account"],
                "name": "x"
            },
            "description": fields["description"]
        }
        account = fields["account"]
    else:
        # direction does not allow a transfer of this sign
        errmsg = "No transfer needed, balance already ok"
        print("[target_balance_internal] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    print(paymentmsg)
    # get id and check permissions
    if fields["payment_type"] == "DIRECT":
        accid, enabled = payment.check_source_account(True, False, config,
                                                      account)
    else:
        accid, enabled = payment.check_source_account(False, True, config,
                                                      account)
    # errmsg is still None here (check_fields gate above passed)
    if accid is None:
        errmsg = "unknown account: "+account
    if not enabled:
        errmsg = "Payment type not enabled for account: "+account
    if errmsg:
        print("[target_balance_internal] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    # execute the payment
    if fields["payment_type"] == "DIRECT":
        result = bunq.post("v1/user/{}/monetary-account/{}/payment"
                           .format(config["user_id"], accid), paymentmsg)
    else:
        # non-direct transfers go through a self-accepting draft payment
        paymentmsg = {"number_of_required_accepts": 1, "entries": [paymentmsg]}
        result = bunq.post("v1/user/{}/monetary-account/{}/draft-payment"
                           .format(config["user_id"], accid), paymentmsg)
    print(result)
    if "Error" in result:
        return json.dumps({"errors": [{
            "status": "SKIP",
            "message": result["Error"][0]["error_description"]
        }]}), 400
    return json.dumps({"data": [{
        "id": str(result["Response"][0]["Id"]["id"])}]})
def target_balance_external():
    """ Execute a target balance external action

    Brings "account" to the requested target balance using the outside
    world: a bunq payment request to a phone/email/IBAN contact (top up)
    or a draft payment to an external IBAN (skim).  Returns an
    IFTTT-style JSON body, with HTTP 400 on any error.
    """
    data = request.get_json()
    print("[target_balance_external] input: {}".format(json.dumps(data)))
    # IFTTT wraps the action parameters in "actionFields"
    if "actionFields" not in data:
        errmsg = "missing actionFields"
        print("[target_balance_external] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    fields = data["actionFields"]
    # validates and normalizes the fields in place; returns message or None
    errmsg = check_fields(False, fields)
    if errmsg:
        print("[target_balance_external] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    # the account NL42BUNQ0123456789 is used for test payments
    if fields["account"] == "NL42BUNQ0123456789":
        return json.dumps({"data": [{"id": uuid.uuid4().hex}]})
    # retrieve balance
    config = bunq.retrieve_config()
    balance = get_balance(config, fields["account"])
    # get_balance() returns an error string when the lookup fails
    if isinstance(balance, str):
        errmsg = balance
        print("[target_balance_external] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    transfer_amount = fields["amount"] - balance
    # check for zero transfer
    # NOTE(review): formats fields["amount"] (guaranteed > 0 by
    # check_fields) instead of transfer_amount — likely a bug; confirm.
    if "{:.2f}".format(fields["amount"]) == "0.00":
        errmsg = "No transfer needed, balance already ok"
        print("[target_balance_external] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    # get account id and check permission
    if transfer_amount > 0:
        # topping up issues a payment request FROM "account", so resolve
        # the id and the PaymentRequest permission directly from config
        accid = None
        for acc in config["accounts"]:
            if acc["iban"] == fields["account"]:
                accid = acc["id"]
        enabled = False
        if "permissions" in config:
            if fields["account"] in config["permissions"]:
                if "PaymentRequest" in config["permissions"]\
                        [fields["account"]]:
                    enabled = config["permissions"][fields["account"]]\
                        ["PaymentRequest"]
    else:
        accid, enabled = payment.check_source_account(False, True, config,
                                                      fields["account"])
    # errmsg is still None here (check_fields gate above passed)
    if accid is None:
        errmsg = "unknown account: "+fields["account"]
    if not enabled:
        errmsg = "Not permitted for account: "+fields["account"]
    if errmsg:
        print("[target_balance_external] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    # send request / execute payment
    if transfer_amount > 0 and "top up" in fields["direction"]:
        # classify the counterparty as email / phone number / IBAN
        bmvalue = fields["request_phone_email_iban"].replace(" ", "")
        if "@" in bmvalue:
            bmtype = "EMAIL"
        elif bmvalue[:1] == "+" and bmvalue[1:].isdecimal():
            bmtype = "PHONE_NUMBER"
        elif bmvalue[:2].isalpha() and bmvalue[2:4].isdecimal():
            bmtype = "IBAN"
        else:
            errmsg = "Unrecognized as email, phone or iban: "+bmvalue
            print("[request_inquiry] ERROR: "+errmsg)
            return json.dumps({"errors": [{"status": "SKIP", "message":\
                errmsg}]}), 400
        msg = {
            "amount_inquired": {
                "value": "{:.2f}".format(transfer_amount),
                "currency": "EUR",
            },
            "counterparty_alias": {
                "type": bmtype,
                "name": bmvalue,
                "value": bmvalue
            },
            "description": fields["request_description"],
            "allow_bunqme": True,
        }
        print(json.dumps(msg))
        config = bunq.retrieve_config()
        result = bunq.post("v1/user/{}/monetary-account/{}/request-inquiry"\
                           .format(config["user_id"], accid), msg, config)
    elif transfer_amount < 0 and "skim" in fields["direction"]:
        # surplus goes out as a draft payment to the configured IBAN
        paymentmsg = {
            "amount": {
                "value": "{:.2f}".format(-transfer_amount),
                "currency": "EUR"
            },
            "counterparty_alias": {
                "type": "IBAN",
                "value": fields["payment_account"],
                "name": fields["payment_name"]
            },
            "description": fields["payment_description"]
        }
        print(paymentmsg)
        paymentmsg = {"number_of_required_accepts": 1, "entries": [paymentmsg]}
        result = bunq.post("v1/user/{}/monetary-account/{}/draft-payment"
                           .format(config["user_id"], accid), paymentmsg)
    else:
        # direction does not allow a transfer of this sign
        errmsg = "No transfer needed, balance already ok"
        print("[target_balance_external] ERROR: "+errmsg)
        return json.dumps({"errors": [{"status": "SKIP", "message": errmsg}]})\
            , 400
    print(result)
    if "Error" in result:
        return json.dumps({"errors": [{
            "status": "SKIP",
            "message": result["Error"][0]["error_description"]
        }]}), 400
    return json.dumps({"data": [{
        "id": str(result["Response"][0]["Id"]["id"])}]})
def check_fields(internal, fields):
    """ Check and normalize the action fields in place.

    Verifies all expected fields are present, strips spaces from account
    numbers, and converts "amount" to a float.  Returns an error message
    string on failure, or None when everything is valid.

    Fixes over the original: the error message is built with format()
    instead of string concatenation (the old "..." + orig raised a
    TypeError when the amount came in as a number instead of a string),
    and float(None)-style inputs (TypeError) are handled like ValueError.
    """
    # check expected fields
    if internal:
        expected_fields = ["account", "amount", "other_account", "direction",
                           "payment_type", "description"]
    else:
        expected_fields = ["account", "amount", "direction", "payment_account",
                           "payment_name", "payment_description",
                           "request_phone_email_iban", "request_description"]
    for field in expected_fields:
        if field not in fields:
            return "missing field: "+field
    # strip spaces from account numbers
    fields["account"] = fields["account"].replace(" ", "")
    if internal:
        fields["other_account"] = fields["other_account"].replace(" ", "")
    else:
        fields["payment_account"] = fields["payment_account"].replace(" ", "")
    # check amount: keep the original value for the error message
    orig = fields["amount"]
    try:
        fields["amount"] = float(fields["amount"])
    except (ValueError, TypeError):
        fields["amount"] = -1
    if fields["amount"] <= 0:
        return "only positive amounts allowed: {}".format(orig)
    return None
def get_balance(config, account, account2=None):
    """ Retrieve the balance of one or two accounts.

    Returns a single balance when only *account* is given, a
    (balance, balance2) pair when *account2* is given, or an error
    message string when either account cannot be found.
    """
    balances = bunq.retrieve_account_balances(config)
    if account not in balances:
        return "Account balance not found "+account
    if account2 is None:
        return balances[account]
    if account2 not in balances:
        return "Account balance not found "+account2
    return balances[account], balances[account2]
| 1.9375 | 2 |
auth_custom/apps.py | u-transnet/utransnet-gateway | 0 | 12799527 | from django.apps import AppConfig
class AuthCustomConfig(AppConfig):
    """Django application configuration for the ``auth_custom`` app."""
    # Dotted module path Django uses to locate and register this app.
    name = 'auth_custom'
| 0.722656 | 1 |
labs/lab-1/exercises/lab-1.1-introduction-to-tensorflow/1.1-introduction-to-tensorflow.py | rubenandrebarreiro/fct-nova-deep-learning-labs | 1 | 12799551 | """
Lab 1.1 - Introduction to TensorFlow
Author:
- <NAME> (<EMAIL>)
- <NAME> (<EMAIL>)
"""
# Import the Libraries and Packages
# Import the Operative System Library as operative_system
import os as operative_system
# Disable all the Debugging Logs from TensorFlow Library
# (must be set BEFORE TensorFlow is imported to take effect)
operative_system.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# Import the TensorFlow Library as tensorflow alias
import tensorflow as tensorflow
# Constants
# Toggle printing of the tensors created below
LOGGING_FLAG = True
# Create the constant "a" as a float of 32 bits and
# assign to it the value 3
a = tensorflow.constant(3.0, dtype=tensorflow.float32, name="a")
# Create the constant "b" and assign to it the value 4
# (dtype float32 is inferred from the literal)
b = tensorflow.constant(4.0, name="b")
# Create the addition of the constants "a" and "b", as "total",
# i.e., total = a + b
total = tensorflow.add(a, b, name="total")
# If the Logging Flag is set to True
if LOGGING_FLAG:
    # Print the header for the Logging
    tensorflow.print("\n\nLogging of the Execution:\n")
    # Print the Tensor for the constant "a"
    tensorflow.print("a = ", a)
    # Print the Tensor for the constant "b"
    tensorflow.print("b = ", b)
    # Print the Tensor for the addition of
    # the constants "a" and "b", as "total"
    tensorflow.print("total = a + b = ", total)
| 2.546875 | 3 |
setup.py | pingf/yadashcomp | 0 | 12799559 | from setuptools import setup
# Load __version__ from the package's version module without importing the
# package itself (avoids pulling in package dependencies at install time).
# The original `exec (open(...).read())` leaked the file handle; the
# context manager guarantees it is closed.
with open('yadashcomp/version.py') as version_file:
    exec(version_file.read())

setup(
    name='yadashcomp',
    version=__version__,
    author='pingf',
    packages=['yadashcomp'],
    include_package_data=True,
    license='MIT',
    description='yet another dash components',
    install_requires=[]
)
| 0.898438 | 1 |
matplotlib/two_line_in_same_plot.py | abhayanigam/Learn_Python_Programming | 1 | 12799607 | import numpy as np
from matplotlib import pyplot as plt
# Two straight lines through the origin: y = 2x (blue, dash-dot) and
# y = 3x (yellow, solid), drawn from x = 0 to x = 10.
x_points = np.array([0, 10])
y_doubled = x_points * 2
y_tripled = x_points * 3

plt.plot(x_points, y_doubled, color='b', ls='-.', linewidth=2)
plt.plot(x_points, y_tripled, color='y', ls='-', linewidth=3)
plt.title("Two line in same plot")
plt.xlabel("X-Axis")
plt.ylabel("Y-Axis")
plt.show()
run_http_measurements.py | kosekmi/quic-opensand-emulation | 0 | 12799639 | # Original script: https://github.com/Lucapaulo/web-performance/blob/main/run_measurements.py
import re
import time
import selenium.common.exceptions
from selenium import webdriver
from selenium.webdriver.chrome.options import Options as chromeOptions
import sys
from datetime import datetime
import hashlib
import uuid
import os
import csv
# performance elements to extract from the browser's Navigation Timing /
# Paint Timing entries; these become CSV columns in exactly this order
measurement_elements = ('protocol', 'server', 'domain', 'timestamp', 'connectEnd', 'connectStart', 'domComplete',
                        'domContentLoadedEventEnd', 'domContentLoadedEventStart', 'domInteractive', 'domainLookupEnd',
                        'domainLookupStart', 'duration', 'encodedBodySize', 'decodedBodySize', 'transferSize',
                        'fetchStart', 'loadEventEnd', 'loadEventStart', 'requestStart', 'responseEnd', 'responseStart',
                        'secureConnectionStart', 'startTime', 'firstPaint', 'firstContentfulPaint', 'nextHopProtocol', 'cacheWarming', 'error')
# extra per-run columns supplied on the command line (semicolon separated)
file_elements = ('pep', 'run')
# retrieve input params
try:
    protocol = sys.argv[1]
    server = sys.argv[2]
    chrome_path = sys.argv[3]
    output_dir = sys.argv[4]
    file_elements_values = sys.argv[5].split(';')
except IndexError:
    print("Input params incomplete (protocol, server, chrome_driver, output_dir)")
    sys.exit(1)
if len(file_elements) != len(file_elements_values):
    print("Number of file elements does not match")
    sys.exit(1)
# Chrome options
chrome_options = chromeOptions()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-dev-shm-usage')
if protocol == 'quic':
    # force QUIC for example.com and accept the self-signed test server's
    # certificate; example.com is mapped onto the server under test
    chrome_options.add_argument('--enable-quic')
    chrome_options.add_argument('--origin-to-force-quic-on=example.com:443')
    chrome_options.add_argument('--allow_unknown_root_cer')
    chrome_options.add_argument('--disable_certificate_verification')
    chrome_options.add_argument('--ignore-urlfetcher-cert-requests')
    chrome_options.add_argument(f"--host-resolver-rules=MAP example.com {server}")
chrome_options.add_argument('--verbose')
chrome_options.add_argument('--disable-http-cache')
# Function to create: openssl x509 -pubkey < "pubkey.pem" | openssl pkey -pubin -outform der | openssl dgst -sha256 -binary | base64 > "fingerprints.txt"
chrome_options.add_argument('--ignore-certificate-errors-spki-list=D29LAH0IMcLx/d7R2JAH5bw/YKYK9uNRYc6W0/GJlS8=')
def create_driver():
    """Create a headless Chrome WebDriver using the module-wide options."""
    return webdriver.Chrome(options=chrome_options, executable_path=chrome_path)
def get_page_performance_metrics(driver, page):
    """Load *page* and return its Navigation Timing entry as a dict.

    first-paint / first-contentful-paint are merged into the result under
    'firstPaint' / 'firstContentfulPaint' (0 when the browser does not
    report them).  On any WebDriver failure a dict containing only an
    'error' key is returned instead.
    """
    script = """
        // Get performance and paint entries
        var perfEntries = performance.getEntriesByType("navigation");
        var paintEntries = performance.getEntriesByType("paint");
        var entry = perfEntries[0];
        var fpEntry = paintEntries[0];
        var fcpEntry = paintEntries[1];
        // Get the JSON and first paint + first contentful paint
        var resultJson = entry.toJSON();
        resultJson.firstPaint = 0;
        resultJson.firstContentfulPaint = 0;
        try {
            for (var i=0; i<paintEntries.length; i++) {
                var pJson = paintEntries[i].toJSON();
                if (pJson.name == 'first-paint') {
                    resultJson.firstPaint = pJson.startTime;
                } else if (pJson.name == 'first-contentful-paint') {
                    resultJson.firstContentfulPaint = pJson.startTime;
                }
            }
        } catch(e) {}
        return resultJson;
        """
    try:
        driver.set_page_load_timeout(60)
        # QUIC requires https; the baseline is fetched over plain http
        if protocol == 'quic':
            driver.get(f'https://{page}')
        else:
            driver.get(f'http://{page}')
        return driver.execute_script(script)
    except selenium.common.exceptions.WebDriverException as e:
        return {'error': str(e)}
def perform_page_load(page, cache_warming=0):
    """Run one measured page load in a fresh driver and record the result.

    A failed load is still recorded: all metric columns are zeroed and the
    WebDriver error message goes into the 'error' column.
    """
    driver = create_driver()
    timestamp = datetime.now()
    performance_metrics = get_page_performance_metrics(driver, page)
    # insert page into database
    if 'error' not in performance_metrics:
        # Print page source
        # print(driver.page_source)
        driver.save_screenshot(f'{output_dir}/screenshot.png')
        insert_performance(page, performance_metrics, timestamp, cache_warming=cache_warming)
    else:
        # zeroed row so the failure itself still shows up in the CSV
        insert_performance(page, {k: 0 for k in measurement_elements}, timestamp, cache_warming=cache_warming,
                           error=performance_metrics['error'])
    driver.quit()
def create_measurements_table():
    """Open (or create) the output CSV and write the header row for new files.

    Chooses http.csv vs http_pep.csv based on the first file element
    ('pep').  Sets the module-level ``local_csvfile`` handle and
    ``csvfile`` writer; the file is closed by the caller at the end of
    the run.  Fixes over the original: files are opened with
    ``newline=''`` as required by the csv module (prevents blank lines
    on Windows), and the non-idiomatic ``if new == True`` comparison.
    """
    new = False
    global local_csvfile
    file_path = f'{output_dir}/http.csv' if file_elements_values[0] == 'false' else f'{output_dir}/http_pep.csv'
    if os.path.isfile(file_path):
        local_csvfile = open(file_path, mode='a', newline='')
    else:
        local_csvfile = open(file_path, mode='w', newline='')
        new = True
    global csvfile
    csvfile = csv.writer(local_csvfile, delimiter=';')
    if new:
        headers = file_elements + measurement_elements
        csvfile.writerow(headers)
def insert_performance(page, performance, timestamp, cache_warming=0, error=''):
    """Append one measurement row (run metadata + metrics) to the open CSV.

    The *performance* dict is annotated in place with the run-wide
    context columns before the row is written.
    """
    performance.update({
        'protocol': protocol,
        'server': server,
        'domain': page,
        'timestamp': timestamp,
        'cacheWarming': cache_warming,
        'error': error,
    })
    row = file_elements_values.copy()
    row.extend(performance[element] for element in measurement_elements)
    csvfile.writerow(row)
# open the CSV output and write the header when the file is new
create_measurements_table()
# performance measurement
perform_page_load("example.com")
# flush and close the CSV handle opened by create_measurements_table()
local_csvfile.close()
| 1.632813 | 2 |
backends/redis.py | iliadmitriev/auth-api | 3 | 12799655 | import aioredis
async def init_redis(app):
    """aiohttp on_startup hook: create the shared Redis client from the URL."""
    app['redis'] = aioredis.from_url(
        app['redis_location'],
    )
async def close_redis(app):
    """aiohttp on_cleanup hook: close the shared Redis client."""
    await app['redis'].close()
def setup_redis(app, redis_location):
    """Register the Redis connection lifecycle on an aiohttp application."""
    app['redis_location'] = redis_location
    app.on_startup.append(init_redis)
    app.on_cleanup.append(close_redis)
async def get_redis_key(redis, key):
    """Fetch the value stored under *key* (None when the key is absent)."""
    async with redis.client() as connection:
        return await connection.get(key)
async def set_redis_key(redis, key, value, expire=None):
    """Store *value* under *key*, optionally expiring after *expire* seconds."""
    async with redis.client() as connection:
        if expire is None:
            return await connection.set(key, value)
        return await connection.set(key, value, ex=expire)
| 1.390625 | 1 |
gpu_tasker/email_settings_sample.py | cnstark/awesome_gpu_scheduler | 35 | 12799663 | EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# SMTP server settings for a 163.com mailbox; port 465 requires SSL.
EMAIL_HOST = 'smtp.163.com'
EMAIL_PORT = 465
# Credentials are sample placeholders — fill in before deploying.
EMAIL_HOST_USER = '<EMAIL>'
EMAIL_HOST_PASSWORD = '<PASSWORD>'
EMAIL_USE_SSL = True
EMAIL_USE_LOCALTIME = True
# Sender identity shown to recipients, e.g. "GPUTasker<user@163.com>".
DEFAULT_FROM_EMAIL = 'GPUTasker<{}>'.format(EMAIL_HOST_USER)
SERVER_EMAIL = EMAIL_HOST_USER
| 0.660156 | 1 |
boo/okved/__init__.py | vishalbelsare/boo | 14 | 12799671 | from .okved import all_codes_v2, name_v2
| -0.103027 | 0 |
output/models/ms_data/regex/re_c43_xsd/__init__.py | tefra/xsdata-w3c-tests | 1 | 12799703 | from output.models.ms_data.regex.re_c43_xsd.re_c43 import (
Regex,
Doc,
)
__all__ = [
"Regex",
"Doc",
]
| -0.099121 | 0 |
handover/evaluation/test_cases/test_58_std.py | CN-UPB/sharp | 2 | 12799735 | from ._base import *
class TestCase003(TestCase):
    """Handover test: fixed 1000 pps with 58-byte packets.

    The state-transfer duration sweeps from 0 to 1 second in 0.1 s steps
    (the [start, stop, step] triple below) while the packet rate stays
    constant; results go to the standard report set.
    """

    def __init__(self):
        TestCase.__init__(self,
                          id='003',
                          alt_id='fixed_pps_increasing_pps_58_bytes',
                          description='Fixed PPS. Increasing State Transfer Duration. 58 Byte Packets',
                          pps=1000,
                          packet_size=58,
                          state_duration=[0, 1, 0.1],
                          reports=STD_REPORTS)
| 0.992188 | 1 |
src/pynn/bin/average_state.py | enesyugan/yapay-nn | 0 | 12799743 | #!/usr/bin/env python3
# encoding: utf-8
# Copyright 2019 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License")
import os, glob
import copy
import argparse
import torch
from pynn.util import load_object_param
# CLI: where the model lives, its config file, and which epoch checkpoints
# to average ('ALL' globs every epoch-*.pt in the model directory).
parser = argparse.ArgumentParser(description='pynn')
parser.add_argument('--model-path', help='model saving path', default='model')
parser.add_argument('--config', help='model config', default='model.cfg')
parser.add_argument('--states', help='model states', default='ALL')
parser.add_argument('--save-all', help='save configuration as well', action='store_true')
if __name__ == '__main__':
    args = parser.parse_args()

    # Instantiate the model, plus an identically-shaped copy ("ext") used
    # to load each subsequent checkpoint that gets averaged in.
    model, cls, module, m_params = load_object_param(args.model_path + '/' + args.config)
    ext = copy.deepcopy(model)

    # Resolve the list of checkpoint files to average.
    if args.states == 'ALL':
        states = [s for s in glob.glob("%s/epoch-*.pt" % args.model_path)]
    else:
        states = args.states.split(',')
        states = ["%s/epoch-%s.pt" % (args.model_path, s) for s in states]

    # Start from the first checkpoint, then accumulate the remaining
    # checkpoints' parameters element-wise in place.
    state = states[0]
    model.load_state_dict(torch.load(state, map_location='cpu'))
    params = list(model.parameters())
    for state in states[1:]:
        ext.load_state_dict(torch.load(state, map_location='cpu'))
        eparams = list(ext.parameters())
        for i in range(len(params)):
            params[i].data.add_(eparams[i].data)
    # Divide by the number of checkpoints to get the element-wise mean.
    scale = 1.0 / len(states)
    for p in params: p.data.mul_(scale)

    state = model.state_dict()
    if not args.save_all:
        model_file = '%s/epoch-avg.pt' % args.model_path
        torch.save(state, model_file)
    else:
        # Bundle the construction metadata alongside the averaged weights
        # so the result can be re-instantiated standalone.
        dic = {'params': m_params, 'class': cls, 'module': module, 'state': state}
        torch.save(dic, '%s/epoch-avg.dic' % args.model_path)
| 1.375 | 1 |
py/path/svn/testing/svntestbase.py | woodrow/pyoac | 1 | 12799767 | import sys
import py
from py import path, test, process
from py.__.path.testing.fscommon import CommonFSTests, setuptestfs
from py.__.path.svn import cache, svncommon
# location of this test module and the dump file used to seed repositories
mypath = py.magic.autopath()
repodump = mypath.dirpath('repotest.dump')
def getsvnbin():
    """Return the path of the ``svn`` binary, skipping the test if absent."""
    svnbin = py.path.local.sysfind('svn')
    if svnbin is None:
        py.test.skip("svn binary not found")
    return svnbin
# make a wc directory out of a given root url
# cache previously obtained wcs!
#
def getrepowc(reponame='basetestrepo', wcname='wc'):
    """Return (repo_url, workingcopy) for the shared test repository.

    The repository is created and seeded from repotest.dump only on the
    first call; later calls reuse the existing repo/wc tempdirs.
    (Python 2 module: print statements below are intentional.)
    """
    repo = py.test.ensuretemp(reponame)
    wcdir = py.test.ensuretemp(wcname)
    if not repo.listdir():
        #assert not wcdir.check()
        repo.ensure(dir=1)
        py.process.cmdexec('svnadmin create "%s"' %
                           svncommon._escape_helper(repo))
        py.process.cmdexec('svnadmin load -q "%s" <"%s"' %
                           (svncommon._escape_helper(repo), repodump))
        print "created svn repository", repo
        wcdir.ensure(dir=1)
        wc = py.path.svnwc(wcdir)
        if py.std.sys.platform == 'win32':
            # file:// urls on windows need a leading slash and forward slashes
            repo = '/' + str(repo).replace('\\', '/')
        wc.checkout(url='file://%s' % repo)
        print "checked out new repo into", wc
    else:
        print "using repository at", repo
        wc = py.path.svnwc(wcdir)
    return ("file://%s" % repo, wc)
def save_repowc():
    """Snapshot the shared repo and wc; returns (savedrepo, savedwc) paths."""
    repo, wc = getrepowc()
    # strip the file:// scheme to get a local filesystem path
    repo = py.path.local(repo[len("file://"):])
    assert repo.check()
    savedrepo = repo.dirpath('repo_save')
    savedwc = wc.dirpath('wc_save')
    repo.copy(savedrepo)
    wc.localpath.copy(savedwc.localpath)
    return savedrepo, savedwc
def restore_repowc((savedrepo, savedwc)):
    """Put a snapshot made by save_repowc() back in place.

    Takes the (savedrepo, savedwc) pair as a Python 2 tuple parameter.
    """
    repo, wc = getrepowc()
    print repo
    print repo[len("file://"):]
    repo = py.path.local(repo[len("file://"):])
    print repo
    assert repo.check()
    # repositories have read only files on windows
    #repo.chmod(0777, rec=True)
    repo.remove()
    wc.localpath.remove()
    savedrepo.move(repo)
    savedwc.localpath.move(wc.localpath)
# create an empty repository for testing purposes and return the url to it
def make_test_repo(name="test-repository"):
    """Create a fresh empty svn repository; returns a py.path.svnurl to it."""
    repo = py.test.ensuretemp(name)
    try:
        py.process.cmdexec('svnadmin create %s' % repo)
    except:
        # creation failed: drop the half-made tempdir before re-raising
        repo.remove()
        raise
    if sys.platform == 'win32':
        # file:// urls on windows need a leading slash and forward slashes
        repo = '/' + str(repo).replace('\\', '/')
    return py.path.svnurl("file://%s" % repo)
class CommonSvnTests(CommonFSTests):
    """Filesystem tests shared by the svn path implementations.

    Destructive tests snapshot the shared repo/wc pair before running and
    restore it afterwards so the fixture stays pristine for other tests.
    """

    def setup_method(self, meth):
        # snapshot only for the tests that modify the repository
        bn = meth.func_name
        for x in 'test_remove', 'test_move', 'test_status_deleted':
            if bn.startswith(x):
                self._savedrepowc = save_repowc()

    def teardown_method(self, meth):
        x = getattr(self, '_savedrepowc', None)
        if x is not None:
            restore_repowc(x)
            del self._savedrepowc

    def test_propget(self):
        url = self.root.join("samplefile")
        value = url.propget('svn:eol-style')
        assert value == 'native'

    def test_proplist(self):
        url = self.root.join("samplefile")
        res = url.proplist()
        assert res['svn:eol-style'] == 'native'

    def test_info(self):
        url = self.root.join("samplefile")
        res = url.info()
        assert res.size > len("samplefile") and res.created_rev >= 0

    def test_log_simple(self):
        url = self.root.join("samplefile")
        logentries = url.log()
        for logentry in logentries:
            assert logentry.rev == 1
            assert hasattr(logentry, 'author')
            assert hasattr(logentry, 'date')
class CommonCommandAndBindingTests(CommonSvnTests):
    """Tests shared by the svn command-line and bindings based backends."""

    def test_trailing_slash_is_stripped(self):
        # XXX we need to test more normalizing properties
        url = self.root.join("/")
        assert self.root == url

    #def test_different_revs_compare_unequal(self):
    #    newpath = self.root.new(rev=1199)
    #    assert newpath != self.root

    def test_exists_svn_root(self):
        assert self.root.check()

    #def test_not_exists_rev(self):
    #    url = self.root.__class__(self.rooturl, rev=500)
    #    assert url.check(exists=0)

    #def test_nonexisting_listdir_rev(self):
    #    url = self.root.__class__(self.rooturl, rev=500)
    #    raises(py.error.ENOENT, url.listdir)

    #def test_newrev(self):
    #    url = self.root.new(rev=None)
    #    assert url.rev == None
    #    assert url.strpath == self.root.strpath
    #    url = self.root.new(rev=10)
    #    assert url.rev == 10

    #def test_info_rev(self):
    #    url = self.root.__class__(self.rooturl, rev=1155)
    #    url = url.join("samplefile")
    #    res = url.info()
    #    assert res.size > len("samplefile") and res.created_rev == 1155

    # the following tests are easier if we have a path class
    def test_repocache_simple(self):
        repocache = cache.RepoCache()
        repocache.put(self.root.strpath, 42)
        url, rev = repocache.get(self.root.join('test').strpath)
        assert rev == 42
        assert url == self.root.strpath

    def test_repocache_notimeout(self):
        # a zero timeout forces an immediate cache miss (revision -1)
        repocache = cache.RepoCache()
        repocache.timeout = 0
        repocache.put(self.root.strpath, self.root.rev)
        url, rev = repocache.get(self.root.strpath)
        assert rev == -1
        assert url == self.root.strpath

    def test_repocache_outdated(self):
        # an entry stamped at epoch 0 is long expired -> miss (revision -1)
        repocache = cache.RepoCache()
        repocache.put(self.root.strpath, 42, timestamp=0)
        url, rev = repocache.get(self.root.join('test').strpath)
        assert rev == -1
        assert url == self.root.strpath

    def _test_getreporev(self):
        """ this test runs so slow it's usually disabled """
        old = cache.repositories.repos
        try:
            _repocache.clear()
            root = self.root.new(rev=-1)
            url, rev = cache.repocache.get(root.strpath)
            assert rev>=0
            assert url == svnrepourl
        finally:
            repositories.repos = old
            #cache.repositories.put(svnrepourl, 1200, 0)
| 1.679688 | 2 |
contact_forms/contact/tests/test_config.py | uktrade/dit-contact-forms | 2 | 12799791 | from django.test import SimpleTestCase
from django.apps import apps
from contact.apps import ContactConfig
class ContactConfigTestCase(SimpleTestCase):
    """
    Test app config
    """

    def test_apps(self):
        # Both the AppConfig class and the registered Django app must carry
        # the app label "contact".
        self.assertEqual(ContactConfig.name, "contact")
        self.assertEqual(apps.get_app_config("contact").name, "contact")
| 1.398438 | 1 |
main_exe.py | pkudba/SCL | 7 | 12799823 | import os
import torch
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import yaml
from tqdm.auto import tqdm
from tensorboardX import SummaryWriter
from torch.utils.data.dataloader import DataLoader
| 0.957031 | 1 |
tests/trinity/core/p2p-proto/test_server.py | jin10086/py-evm | 0 | 12799831 | import asyncio
import pytest
import socket
from eth_keys import keys
from cancel_token import CancelToken
from eth.chains.ropsten import RopstenChain, ROPSTEN_GENESIS_HEADER
from eth.db.chain import ChainDB
from eth.db.backends.memory import MemoryDB
from p2p.auth import HandshakeInitiator, _handshake
from p2p.peer import (
PeerPool,
)
from p2p.kademlia import (
Node,
Address,
)
from trinity.protocol.eth.peer import ETHPeer
from trinity.server import Server
from tests.p2p.auth_constants import eip8_values
from tests.trinity.core.dumb_peer import DumbPeer
from tests.trinity.core.integration_test_helpers import FakeAsyncHeaderDB
def get_open_port():
    """Return a TCP port that was free at the time of the call.

    Binds to port 0 so the OS chooses an unused port, then releases it.
    There is an inherent race: the port could be taken again before the
    caller binds to it, which is acceptable for tests.
    """
    # socket objects are context managers; this guarantees close() even if
    # bind()/listen()/getsockname() raises (the original leaked the socket
    # on such failures).
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("", 0))
        s.listen(1)
        return s.getsockname()[1]
# Fixed identities for both handshake sides, keyed from the EIP-8 test
# vectors: the receiver plays the server, the initiator dials in.
port = get_open_port()
NETWORK_ID = 99
SERVER_ADDRESS = Address('127.0.0.1', udp_port=port, tcp_port=port)
RECEIVER_PRIVKEY = keys.PrivateKey(eip8_values['receiver_private_key'])
RECEIVER_PUBKEY = RECEIVER_PRIVKEY.public_key
RECEIVER_REMOTE = Node(RECEIVER_PUBKEY, SERVER_ADDRESS)
INITIATOR_PRIVKEY = keys.PrivateKey(eip8_values['initiator_private_key'])
INITIATOR_PUBKEY = INITIATOR_PRIVKEY.public_key
INITIATOR_ADDRESS = Address('127.0.0.1', get_open_port() + 1)
INITIATOR_REMOTE = Node(INITIATOR_PUBKEY, INITIATOR_ADDRESS)
class MockPeerPool:
    """Minimal stand-in for PeerPool used by the server tests.

    Fix over the original: ``connected_nodes`` was a class-level mutable
    dict, so every MockPeerPool instance shared (and leaked) the same
    peer mapping across tests; it is now created per instance.
    """

    is_full = False

    def __init__(self):
        # per-instance state (see class docstring)
        self.connected_nodes = {}
        self._new_peers = asyncio.Queue()

    async def start_peer(self, peer):
        self.connected_nodes[peer.remote] = peer
        self._new_peers.put_nowait(peer)

    def is_valid_connection_candidate(self, node):
        return True

    def __len__(self):
        return len(self.connected_nodes)

    async def next_peer(self):
        # blocks until start_peer() has queued a peer
        return await self._new_peers.get()
def get_server(privkey, address, peer_class):
    """Build a Server over a fresh in-memory Ropsten chain for tests."""
    base_db = MemoryDB()
    headerdb = FakeAsyncHeaderDB(base_db)
    chaindb = ChainDB(base_db)
    # seed the database with the genesis header so the chain is usable
    chaindb.persist_header(ROPSTEN_GENESIS_HEADER)
    chain = RopstenChain(base_db)
    server = Server(
        privkey,
        address.tcp_port,
        chain,
        chaindb,
        headerdb,
        base_db,
        network_id=NETWORK_ID,
        peer_class=peer_class,
    )
    return server
@pytest.fixture
async def server():
    # Fixture: an ETHPeer server listening on the shared test port; torn
    # down by triggering its cancel token and closing the TCP listener.
    server = get_server(RECEIVER_PRIVKEY, SERVER_ADDRESS, ETHPeer)
    await asyncio.wait_for(server._start_tcp_listener(), timeout=1)
    yield server
    server.cancel_token.trigger()
    await asyncio.wait_for(server._close_tcp_listener(), timeout=1)
@pytest.fixture
async def receiver_server_with_dumb_peer():
    # Same as the server fixture but speaking the DumbPeer protocol,
    # which skips the sub-protocol handshake requirements of ETHPeer.
    server = get_server(RECEIVER_PRIVKEY, SERVER_ADDRESS, DumbPeer)
    await asyncio.wait_for(server._start_tcp_listener(), timeout=1)
    yield server
    server.cancel_token.trigger()
    await asyncio.wait_for(server._close_tcp_listener(), timeout=1)
@pytest.mark.asyncio
async def test_server_incoming_connection(monkeypatch, server, event_loop):
    """Inbound path: an initiator dials the server, completes the auth and
    p2p/sub-protocol handshakes, and shows up in the server's peer pool."""
    # We need this to ensure the server can check if the peer pool is full for
    # incoming connections.
    monkeypatch.setattr(server, 'peer_pool', MockPeerPool())
    use_eip8 = False
    token = CancelToken("<PASSWORD>")
    initiator = HandshakeInitiator(RECEIVER_REMOTE, INITIATOR_PRIVKEY, use_eip8, token)
    reader, writer = await initiator.connect()
    # Send auth init message to the server, then read and decode auth ack
    aes_secret, mac_secret, egress_mac, ingress_mac = await _handshake(
        initiator, reader, writer, token)
    initiator_peer = ETHPeer(
        remote=initiator.remote, privkey=initiator.privkey, reader=reader,
        writer=writer, aes_secret=aes_secret, mac_secret=mac_secret,
        egress_mac=egress_mac, ingress_mac=ingress_mac, headerdb=server.headerdb,
        network_id=NETWORK_ID)
    # Perform p2p/sub-proto handshake, completing the full handshake and causing a new peer to be
    # added to the server's pool.
    await initiator_peer.do_p2p_handshake()
    await initiator_peer.do_sub_proto_handshake()
    # wait for peer to be processed
    await asyncio.wait_for(server.peer_pool.next_peer(), timeout=1)
    assert len(server.peer_pool.connected_nodes) == 1
    receiver_peer = list(server.peer_pool.connected_nodes.values())[0]
    assert isinstance(receiver_peer, ETHPeer)
    assert initiator_peer.sub_proto is not None
    # both sides must agree on the negotiated sub-protocol
    assert initiator_peer.sub_proto.name == receiver_peer.sub_proto.name
    assert initiator_peer.sub_proto.version == receiver_peer.sub_proto.version
    assert receiver_peer.privkey == RECEIVER_PRIVKEY
@pytest.mark.asyncio
async def test_peer_pool_connect(monkeypatch, event_loop, receiver_server_with_dumb_peer):
    """Outbound path: a PeerPool dials the listening server and the peer is
    started exactly once on each side."""
    started_peers = []

    async def mock_start_peer(peer):
        # record peers the server would have started
        nonlocal started_peers
        started_peers.append(peer)

    monkeypatch.setattr(receiver_server_with_dumb_peer, '_start_peer', mock_start_peer)
    # We need this to ensure the server can check if the peer pool is full for
    # incoming connections.
    monkeypatch.setattr(receiver_server_with_dumb_peer, 'peer_pool', MockPeerPool())
    pool = PeerPool(DumbPeer, FakeAsyncHeaderDB(MemoryDB()), NETWORK_ID, INITIATOR_PRIVKEY, tuple())
    nodes = [RECEIVER_REMOTE]
    await pool.connect_to_nodes(nodes)
    # Give the receiver_server a chance to ack the handshake.
    await asyncio.sleep(0.1)
    assert len(started_peers) == 1
    assert len(pool.connected_nodes) == 1
    # Stop our peer to make sure its pending asyncio tasks are cancelled.
    await list(pool.connected_nodes.values())[0].cancel()
| 1.5625 | 2 |
service1.py | theneon-Hacker/main_numProperty | 0 | 12799839 | from itertools import dropwhile
def roman(n):
    """Convert *n* to a Roman numeral string.

    Supports the classic Roman range 1..3999 inclusive; any other value
    yields the placeholder " - ".  Fix over the original: the upper bound
    excluded 3999 itself even though romanize() treats 3999 as
    representable, so roman(3999) wrongly returned the placeholder.
    """
    if 0 < n <= 3999:
        ones = ["", "I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX"]
        tens = ["", "X", "XX", "XXX", "XL", "L", "LX", "LXX", "LXXX", "XC"]
        hunds = ["", "C", "CC", "CCC", "CD", "D", "DC", "DCC", "DCCC", "CM"]
        thounds = ["", "M", "MM", "MMM"]
        t = thounds[n // 1000]
        h = hunds[n // 100 % 10]
        te = tens[n // 10 % 10]
        o = ones[n % 10]
        return t + h + te + o
    else:
        return " - "
def romanize(num):
    """Return a human-readable line with *num* rendered as a Roman numeral."""
    if 0 <= num <= 3999:
        return f"Число в римской системе счисления: {roman(num)}"
    return "Ваше число нельзя представить в римской системе счисления"
def bitize(num):
    """Return a human-readable line with *num* rendered in binary."""
    binary_digits = bin(num)[2:]
    return "Число в двоичной системе счисления: " + binary_digits
def print_all(numData, num):
    """Print the header for *num* followed by every collected property line.

    Each line is indented with a tab; output order matches numData order.
    Fix over the original: iterate the items directly instead of
    enumerate()-ing and then indexing numData[k] while ignoring the
    yielded item.
    """
    print('Число {}:'.format(num), end='\n\t')
    for item in numData:
        print(item, end='\n\t')
    print('')
# Module-level flags mutated by formData(): prime (isS), rectangular (rct),
# unusual (unus), excessive (isEx) and insufficient (suf) number markers.
isS = rct = unus = isEx = suf = False
def formData(numData, MC, num):
global rct, unus, isEx
global isS, suf
if 'простое' in numData[1]:
isS = True
elif 'составное' in numData[1]:
isS = False
if ',' in numData:
rct = True
elif ',' not in numData:
rct = False
if '.' in numData:
unus = True
elif '.' not in numData:
unus = False
if '>' in numData:
isEx = True
if '>' not in numData:
isEx = False
if '<' in numData:
suf = True
else:
suf = False
datapiece = f'''
"Число %d":
"{MC.dividers()}",
"Число простое": {str(isS).lower()},
"Число является прямоугольным": {str(rct).lower()},
"Число - необычное": {str(unus).lower()},
"%s",
"Число избыточное": {str(isEx).lower()},
"Число недостаточное": {str(suf).lower()},
"{MC.repr_pow2()}",
"{MC.repr_sqrt2()}",
"Число в римской системе счисления": {romanize(num)[35:]},
"Число в двоичной системе счисления": {str(bin(num))[2:]}.
''' % (num, MC.smooth())
return datapiece
def check_savings(file, patterns_list):
    """Recover the most recently saved list of integers from *file*.

    Scans the file from the last line backwards for the first line containing
    every pattern in *patterns_list*. ``patterns_list[0]`` is a label that is
    stripped from the line; the remaining patterns must be single characters
    and are removed as well. The leftover text is parsed as a ", "-separated
    list of ints.

    :param file: path to the savings file (a missing file still raises, as
        before — only read/parse problems are swallowed).
    :param patterns_list: [label, marker_char, marker_char, ...]
    :return: list of ints, or [] when no matching line exists or parsing fails.
    """
    with open(file, 'r') as f:
        try:
            lines = f.read().split('\n')
            for line in reversed(lines):
                # The candidate line must contain every pattern.
                if not all(pattern in line for pattern in patterns_list):
                    continue
                # Remove the single-character markers in one pass ...
                character_map = {ord(marker): '' for marker in patterns_list[1:]}
                cleaned = line.translate(character_map)
                # ... then the label and surrounding whitespace.
                cleaned = cleaned.replace(patterns_list[0], '').strip(' \n')
                return [int(part) for part in cleaned.split(', ')]
            # No line matched: nothing saved yet. (The original signalled this
            # with ``assert False`` caught by a bare ``except:``.)
            return []
        except Exception:
            # Best-effort recovery: any read/parse problem means "no savings".
            # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # are no longer swallowed.
            return []
if __name__ == '__main__':
    # Helper module only — no standalone behaviour.
    pass
| 2.0625 | 2 |
ndv_ru.py | nonameists/puls_test | 0 | 12799847 | import json
import re
import requests
from bs4 import BeautifulSoup as soup
# Canonical set of output keys for every scraped listing record; each parsed
# flat/parking dict starts as dict.fromkeys(DICT_KEYS) so all records share
# the same schema even when some fields are missing.
DICT_KEYS = ['complex', 'type', 'phase', 'building', 'section', 'price_base',
             'price_finished', 'price_sale', 'price_finished_sale', 'area',
             'number', 'number_on_site', 'rooms', 'floor', 'in_sale',
             'sale_status', 'finished', 'currency', 'ceil', 'article',
             'finishing_name', 'furniture', 'furniture_price', 'plan',
             'feature', 'view', 'euro_planning', 'sale', 'discount_percent',
             'discount', 'comment']
class NdvParser:
    """Scraper for new-building flat and parking listings on www.ndv.ru."""

    def __init__(self):
        self.session = requests.Session()
        self.base_url = 'https://www.ndv.ru'
        self.base_url_flats = 'https://www.ndv.ru/novostrojki/flats'
        self.new_buildings_url = 'https://www.ndv.ru/novostrojki'
        # Template record with every output key preset to None.
        self.parser_dict = dict.fromkeys(DICT_KEYS)
        # NOTE: performs network I/O at construction time.
        self.objects_list = self._get_new_buildings(self.new_buildings_url)

    def get_flats_data(self):
        """
        Fetch data about flats for sale in new buildings.
        Returns a list of dicts with flat data.
        :return: list of dicts
        """
        # resulting list of objects to return
        objects = []
        raw_data = self.session.get(self.base_url_flats).content
        content = soup(raw_data, 'html.parser')
        # Look for a paginator on the page
        pages = self._find_pagination(content)
        if pages:
            for i in range(1, pages+1):
                page_url = self.base_url_flats + f'?page={i}'
                raw_data = self.session.get(page_url).content
                content = soup(raw_data, 'html.parser')
                # extend the resulting list
                objects.extend(self._write_flats_data(content))
        else:
            objects = self._write_flats_data(content)
        return objects

    def get_parking_data(self):
        """
        Fetch data about parking spots for sale.
        Returns a list of dicts with parking-spot data.
        :return: list of dicts
        """
        objects = []
        # Iterate over the list of housing complexes
        for item in self.objects_list:
            # Take the complex name and its page URL; append /parking to the URL
            location, url = item
            url += '/parking'
            answer = self.session.get(url)
            # Check whether any parking spots are on sale. If not, take the next complex
            if answer.status_code == 404:
                continue
            raw_data = answer.content
            content = soup(raw_data, 'html.parser')
            # Locate the "Show n offers" button and extract the offer count
            row = content.find('a', id='NewBuildingComplexUpdateButton').get_text(strip=True)
            number = int(re.search('(?P<number>\d+)', row).group())
            # The page exists but currently has 0 offers -> take the next complex
            if not number:
                continue
            # Look for a paginator on the page
            pages = self._find_pagination(content)
            if pages:
                for i in range(1, pages+1):
                    page_url = url + f'?page={i}'
                    raw_data = self.session.get(page_url).content
                    content = soup(raw_data, 'html.parser')
                    # extend the resulting list
                    objects.extend(self._write_parking_data(content, location))
            else:
                objects.extend(self._write_parking_data(content, location))
        return objects

    def get_full_data(self, json_file=None):
        """
        Parse data about flats in new buildings plus parking spots.
        Writes the collected data to a json file.
        :return: list of dicts - if json_file=None
        :return: json_file - if json_file=True
        """
        print('Starting data parsing...')
        flats = self.get_flats_data()
        parking = self.get_parking_data()
        data_result = flats + parking
        if json_file is None:
            return data_result
        else:
            with open('ndv_ru.json', 'w') as file:
                json.dump(data_result, file)
            print('Success')

    def _get_new_buildings(self, url):
        """
        Return a list of tuples with a housing-complex name and its URL.
        :param url: str
        :return: list of tuples
        [('Мкр. «Мегаполис»(Москва, ВАО, Салтыковская улица 8с22)','/novostrojki/zhk/mkr-megapolis')]
        """
        objects = []
        raw_data = self.session.get(url).content
        content = soup(raw_data, 'html.parser')
        # Look for a paginator on the page
        pages = self._find_pagination(content)
        if pages:
            for i in range(1, pages + 1):
                # append ?page=n to the URL
                page_url = self.new_buildings_url + f'?page={i}'
                raw_data = self.session.get(page_url).content
                content = soup(raw_data, 'html.parser')
                # extend the resulting list
                objects.extend(self._get_objects(content))
        else:
            objects = self._get_objects(content)
        return objects

    def _get_objects(self, data):
        """
        Takes a bs4.BeautifulSoup object.
        Finds the complex name, its region and the link to the complex page.
        :param data: bs4.BeautifulSoup
        :return: list of tuples
        [('Мкр. «Мегаполис»(Москва, ВАО, Салтыковская улица 8с22)','/novostrojki/zhk/mkr-megapolis')]
        """
        output = []
        raw_data = data.find_all('div', {'class': 'tile__content'})
        for item in raw_data:
            name = item.select_one('a', {'class': 'tile__name'}).text.strip()
            location = item.find('span', {'class': 'tile__location'}).get_text().strip()
            urn = item.select_one('a', {'class': 'tile__name'}).get('href')
            output.append((name + f'({location})', self.base_url + urn))
        return output

    def _find_pagination(self, data):
        """
        Takes a bs4.BeautifulSoup object.
        Searches for a paginator; if present, returns the last page number.
        :param data: bs4.BeautifulSoup
        :return: int last page number or False
        """
        pages = data.findAll('a', {'class': 'move-to-page'})
        if pages:
            # The last element is the "next" arrow, so the page number is at [-2].
            last_page = int(pages[-2].text)
            return last_page
        return False

    def _get_image(self, data):
        """
        Parse the flat floor-plan image.
        Takes a bs4.element.Tag, searches the div with class tile__image and
        extracts the URL with a regular expression.
        :param data: bs4.element.Tag
        :return: str (image src url)
        """
        try:
            plan = data.find('div', class_='tile__image')['data-deskstop']
            plan = re.search("url\('(?P<url>\S+)'\)", plan).group('url')
            if plan == '/img/new-design/no-image.svg':
                return None
            return plan
        except AttributeError:
            return None

    def _get_complex(self, data):
        """
        Find the housing-complex name and its region.
        :param data: bs4.element.Tag
        :return: str
        """
        try:
            # NOTE(review): 'complex' shadows the builtin; harmless locally.
            complex = data.find(
                'a',
                class_='tile__resale-complex--link js_tile_complex_link'
            ).get_text(
                strip=True
            )
            location = data.find('span', class_='tile__location').get_text(strip=True)
            complex += f'({location})'
            return complex
        except AttributeError:
            return None

    def _get_phase(self, data):
        """
        Find the construction phase.
        :param data: bs4.element.Tag
        :return: str
        """
        try:
            phase = data.find('span', class_='tile__row--resale_date').get_text(strip=True)
            return phase
        except AttributeError:
            return None

    def _price_base(self, data):
        """
        Find the flat price.
        :param data: bs4.element.Tag
        :return: str
        """
        try:
            price_base = data.find('span', class_='tile__price').get_text(strip=True)
            price_base = int(''.join(price_base.split()[:3]))
            return price_base
        except AttributeError:
            return None

    def _get_complex_item(self, data):
        """
        Collect flat details: building, section, floor and flat number.
        Returns a dict with the keys ['section', 'floor', 'number', 'building'].
        :param data: bs4.element.Tag
        :return: dict
        """
        keys = ('section', 'floor', 'number', 'building')
        result = dict.fromkeys(keys)
        info = data.find_all('div', class_='tile__in-complex-item')
        for item in info:
            title = item.select_one('.tile__in-complex-title').get_text(strip=True).lower()
            value = item.select_one('.tile__in-complex-value').get_text(strip=True)
            # Titles are Russian labels: корпус=building, секция=section,
            # этаж=floor, номер=number.
            if title == 'корпус':
                result['building'] = value
            elif title == 'секция':
                result['section'] = value
            elif title == 'этаж':
                result['floor'] = value
            elif title == 'номер':
                result['number'] = value
        return result

    def _get_dimentions(self, data):
        """
        Find the number of rooms and the area, and classify the listing as
        an apartment or a flat.
        :param data: bs4.element.Tag
        :return: dict
        """
        # NOTE(review): the method name keeps the original 'dimentions'
        # spelling; renaming would break callers.
        result = dict()
        name = data.find('a', {'class': 'tile__name'}).get_text(strip=True)
        result['area'] = float(name.split()[-1].replace('м²', '').replace(',', '.'))
        if 'студия' in name.split()[0].lower():
            result['rooms'] = 'studio'
        else:
            result['rooms'] = int(name.split('-')[0])
        if 'апартамент' in name.lower():
            result['type'] = 'apartment'
        else:
            result['type'] = 'flat'
        return result

    def _write_flats_data(self, data):
        """
        Write the data of an individual flat into a dict.
        Takes a bs4.BeautifulSoup object.
        :param data: bs4.BeautifulSoup
        :return: list of dict
        """
        result = []
        # Find the individual listing objects on the page
        raw_data = data.find_all('div', class_='tile__link js-tile-link')
        # loop over every listing
        for item in raw_data:
            # Take a copy of the key-template dict to fill with data
            output = self.parser_dict.copy()
            # record the complex name and region
            output['complex'] = self._get_complex(item)
            # record the construction phase
            output['phase'] = self._get_phase(item)
            # record the price
            output['price_base'] = self._price_base(item)
            # record the floor-plan URL
            output['plan'] = self._get_image(item)
            # update keys with building, section, floor and flat number
            output.update(self._get_complex_item(item))
            # update keys with rooms, area and flat type
            output.update(self._get_dimentions(item))
            # append the dict to the resulting list
            result.append(output)
        return result

    def _write_parking_data(self, data, location):
        """
        Write the data of an individual parking spot.
        Takes a bs4.BeautifulSoup object.
        :param data: bs4.BeautifulSoup
        :param location: str
        :return: list of dicts
        """
        result = []
        # Find the individual parking-spot rows on the complex page
        raw_data = data.find_all('a', class_='flats-table__row table-body--row')
        # loop over every parking spot
        for item in raw_data:
            # Take a copy of the key-template dict to fill with data
            output = self.parser_dict.copy()
            # record the complex name and region
            output['complex'] = location
            # record the parking-spot data (area, building, section, floor, plan)
            output.update(self._get_parking_info(item))
            # append the dict to the resulting list
            result.append(output)
        return result

    def _get_parking_info(self, data):
        """
        Parse the data of a parking spot.
        :param data: bs4.element.Tag
        :return: dict
        """
        plan_img = None
        price_base = None
        price_sale = None
        building = None
        area = None
        section = None
        floor = None
        number = None
        urn = data.get('href')
        parking_url = self.base_url + urn
        parking_data = soup(self.session.get(parking_url).content, 'html.parser')
        # find the parking-spot number
        raw_number = parking_data.find('meta', {'content': '10'})
        if raw_number:
            number = raw_number.previous.strip().split()[1].replace('№', '')
        else:
            try:
                number = parking_data.find('h1', class_='title').get_text(strip=True).split()[2]
            except AttributeError:
                pass
        # find the floor-plan URL
        try:
            plan_div = parking_data.find('div', {'id': 'plans_layout'})
            plan_img = plan_div.find('img').get('src')
        except AttributeError:
            pass
        # find the price (including a discounted price, when present)
        try:
            price_base = parking_data.find('span', class_='card__info-prices__price').get_text(strip=True)
            price_base = int(price_base.split('руб.')[0].replace(' ', ''))
        except AttributeError:
            try:
                price_base = parking_data.find('span', class_='card__info-prices__old').get_text(strip=True)
                price_base = int(price_base.split('руб.')[0].replace(' ', ''))
                price_sale = parking_data.find(
                    'span',
                    class_='card__info-prices__price card__info-prices--red'
                ).get_text(strip=True)
                price_sale = int(price_sale.split('руб.')[0].replace(' ', ''))
            except AttributeError:
                pass
        # parse the parking-spot details (area, building, section, floor)
        parking_div_info = parking_data.find('div', class_='card__info-row card__info-row--settings')
        parking_div_data = parking_div_info.find_all('div', class_='card__info-params__number')
        # parse the area
        try:
            raw_area = parking_div_data[0].get_text(strip=True).split()[0]
            area = float(raw_area.replace(',', '.'))
        except (AttributeError, IndexError):
            pass
        # parse the building
        try:
            building = parking_div_data[1].get_text(strip=True)
        except (AttributeError, IndexError):
            pass
        # parse the section
        try:
            section = parking_div_data[2].get_text(strip=True)
        except (AttributeError, IndexError):
            pass
        # parse the floor
        try:
            floor = parking_div_data[3].get_text(strip=True)
        except (AttributeError, IndexError):
            pass
        output_dict = {
            'number': number,
            'building': building,
            'area': area,
            'price_sale': price_sale,
            'price_base': price_base,
            'type': 'parking',
            'plan': plan_img,
            'section': section,
            'floor': floor
        }
        return output_dict
if __name__ == '__main__':
    ndv = NdvParser()
    # Run the parser for flats and parking spots.
    # The collected data is written to a json file.
    ndv.get_full_data(json_file=True)
| 1.789063 | 2 |
src/oci/adm/models/application_dependency_vulnerability_summary.py | pabs3/oci-python-sdk | 0 | 12799863 | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ApplicationDependencyVulnerabilitySummary(object):
"""
An Application Dependency Vulnerability represents a single dependency in our application.
An Application Dependency Vulnerability can be associated with eventual Vulnerabilities.
Each Application Dependency is uniquely defined by a nodeId and lists eventual dependencies that this element depends on.
"""
def __init__(self, **kwargs):
"""
Initializes a new ApplicationDependencyVulnerabilitySummary object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param gav:
The value to assign to the gav property of this ApplicationDependencyVulnerabilitySummary.
:type gav: str
:param node_id:
The value to assign to the node_id property of this ApplicationDependencyVulnerabilitySummary.
:type node_id: str
:param application_dependency_node_ids:
The value to assign to the application_dependency_node_ids property of this ApplicationDependencyVulnerabilitySummary.
:type application_dependency_node_ids: list[str]
:param vulnerabilities:
The value to assign to the vulnerabilities property of this ApplicationDependencyVulnerabilitySummary.
:type vulnerabilities: list[oci.adm.models.Vulnerability]
:param is_found_in_knowledge_base:
The value to assign to the is_found_in_knowledge_base property of this ApplicationDependencyVulnerabilitySummary.
:type is_found_in_knowledge_base: bool
"""
self.swagger_types = {
'gav': 'str',
'node_id': 'str',
'application_dependency_node_ids': 'list[str]',
'vulnerabilities': 'list[Vulnerability]',
'is_found_in_knowledge_base': 'bool'
}
self.attribute_map = {
'gav': 'gav',
'node_id': 'nodeId',
'application_dependency_node_ids': 'applicationDependencyNodeIds',
'vulnerabilities': 'vulnerabilities',
'is_found_in_knowledge_base': 'isFoundInKnowledgeBase'
}
self._gav = None
self._node_id = None
self._application_dependency_node_ids = None
self._vulnerabilities = None
self._is_found_in_knowledge_base = None
@property
def gav(self):
"""
**[Required]** Gets the gav of this ApplicationDependencyVulnerabilitySummary.
Unique Group Artifact Version (GAV) identifier (Group:Artifact:Version).
:return: The gav of this ApplicationDependencyVulnerabilitySummary.
:rtype: str
"""
return self._gav
@gav.setter
def gav(self, gav):
"""
Sets the gav of this ApplicationDependencyVulnerabilitySummary.
Unique Group Artifact Version (GAV) identifier (Group:Artifact:Version).
:param gav: The gav of this ApplicationDependencyVulnerabilitySummary.
:type: str
"""
self._gav = gav
@property
def node_id(self):
"""
**[Required]** Gets the node_id of this ApplicationDependencyVulnerabilitySummary.
Unique identifier of an Application Dependency node.
:return: The node_id of this ApplicationDependencyVulnerabilitySummary.
:rtype: str
"""
return self._node_id
@node_id.setter
def node_id(self, node_id):
"""
Sets the node_id of this ApplicationDependencyVulnerabilitySummary.
Unique identifier of an Application Dependency node.
:param node_id: The node_id of this ApplicationDependencyVulnerabilitySummary.
:type: str
"""
self._node_id = node_id
@property
def application_dependency_node_ids(self):
"""
**[Required]** Gets the application_dependency_node_ids of this ApplicationDependencyVulnerabilitySummary.
List of (Application Dependencies) node identifiers on which this node depends.
:return: The application_dependency_node_ids of this ApplicationDependencyVulnerabilitySummary.
:rtype: list[str]
"""
return self._application_dependency_node_ids
@application_dependency_node_ids.setter
def application_dependency_node_ids(self, application_dependency_node_ids):
"""
Sets the application_dependency_node_ids of this ApplicationDependencyVulnerabilitySummary.
List of (Application Dependencies) node identifiers on which this node depends.
:param application_dependency_node_ids: The application_dependency_node_ids of this ApplicationDependencyVulnerabilitySummary.
:type: list[str]
"""
self._application_dependency_node_ids = application_dependency_node_ids
@property
def vulnerabilities(self):
"""
**[Required]** Gets the vulnerabilities of this ApplicationDependencyVulnerabilitySummary.
List of vulnerabilities for the Application Dependency.
:return: The vulnerabilities of this ApplicationDependencyVulnerabilitySummary.
:rtype: list[oci.adm.models.Vulnerability]
"""
return self._vulnerabilities
@vulnerabilities.setter
def vulnerabilities(self, vulnerabilities):
"""
Sets the vulnerabilities of this ApplicationDependencyVulnerabilitySummary.
List of vulnerabilities for the Application Dependency.
:param vulnerabilities: The vulnerabilities of this ApplicationDependencyVulnerabilitySummary.
:type: list[oci.adm.models.Vulnerability]
"""
self._vulnerabilities = vulnerabilities
@property
def is_found_in_knowledge_base(self):
"""
**[Required]** Gets the is_found_in_knowledge_base of this ApplicationDependencyVulnerabilitySummary.
Indicates if the artifact is found in the knowledge base.
:return: The is_found_in_knowledge_base of this ApplicationDependencyVulnerabilitySummary.
:rtype: bool
"""
return self._is_found_in_knowledge_base
@is_found_in_knowledge_base.setter
def is_found_in_knowledge_base(self, is_found_in_knowledge_base):
"""
Sets the is_found_in_knowledge_base of this ApplicationDependencyVulnerabilitySummary.
Indicates if the artifact is found in the knowledge base.
:param is_found_in_knowledge_base: The is_found_in_knowledge_base of this ApplicationDependencyVulnerabilitySummary.
:type: bool
"""
self._is_found_in_knowledge_base = is_found_in_knowledge_base
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 1.320313 | 1 |
luizalabs/core/migrations/0011_auto_20190911_0550.py | LucasSRocha/django_rest_llabs | 0 | 12799871 | # Generated by Django 2.1.12 on 2019-09-11 05:50
from django.db import migrations
class Migration(migrations.Migration):
    # Enforce that a given product can appear at most once per wishlist.

    dependencies = [
        ('core', '0010_auto_20190910_1022'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='product',
            unique_together={('wishlist', 'product_id')},
        ),
    ]
| 0.949219 | 1 |
goatools/ratio.py | ezequieljsosa/goatools | 0 | 12799927 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
__copyright__ = "Copyright (C) 2010-2016, <NAME> al., All rights reserved."
__author__ = "various"
from collections import defaultdict, Counter
def count_terms(geneset, assoc, obo_dag):
    """Tally how often each GO term (by its canonical id in *obo_dag*)
    is annotated to genes of the study group.
    """
    counts = Counter()
    for gene in geneset:
        if gene not in assoc:
            continue
        counts.update(obo_dag[term].id for term in assoc[gene] if term in obo_dag)
    return counts
def get_terms(desc, geneset, assoc, obo_dag, log):
    """Map each GO term id to the set of study items annotated with it,
    logging how many items were found in the association.
    """
    annotated = [gene for gene in geneset if gene in assoc]
    term2itemids = defaultdict(set)
    for gene in annotated:
        for term in assoc[gene]:
            if term in obo_dag:
                term2itemids[obo_dag[term].id].add(gene)
    log.write("{N:>6,} out of {M:>6,} {DESC} items found in association\n".format(
        N=len(annotated), M=len(geneset), DESC=desc))
    return term2itemids
def is_ratio_different(min_ratio, study_go, study_n, pop_go, pop_n):
    """Return True when the study-group ratio and the population ratio
    differ by a factor greater than *min_ratio*.

    A *min_ratio* of None disables the filter (always True).
    """
    if min_ratio is None:
        return True
    study_ratio = float(study_go) / study_n
    pop_ratio = float(pop_go) / pop_n
    if study_ratio > pop_ratio:
        return study_ratio / pop_ratio > min_ratio
    return pop_ratio / study_ratio > min_ratio
# Copyright (C) 2010-2016, <NAME> al., All rights reserved.
| 2.390625 | 2 |
test_sample_policies.py | donghun2018/adclick-simulator-v2 | 0 | 12799935 | """
Sample bid policy testing script
for ORF418 Spring 2019 course
"""
import numpy as np
import pandas as pd
def simulator_setup_1day():
    """
    This is a tool to set up a simulator and problem definition (state set, action set, and attribute set)

    Builds a 2-day (2018-01-01..02) date/hour-of-week grid, initializes every
    simulator module with flat priors, and wires them into a
    CompetitiveDateHowSimulator.

    :return: simulator, state set, action set, attribute set
    """
    from ssa_sim_v2.simulator.modules.auctions.auctions_base_module import AuctionsPoissonModule
    from ssa_sim_v2.simulator.modules.auction_attributes.auction_attributes_base_module import \
        AuctionAttributesModule
    from ssa_sim_v2.simulator.modules.vickrey_auction.vickrey_auction_module import VickreyAuctionModule
    from ssa_sim_v2.simulator.modules.competitive_click_probability.competitive_click_probability_base_module import \
        CompetitiveClickProbabilityTwoClassGeometricModule
    from ssa_sim_v2.simulator.modules.competitive_clicks.competitive_clicks_base_module import \
        CompetitiveClicksBinomialModule
    from ssa_sim_v2.simulator.modules.conversion_rate.conversion_rate_base_module import ConversionRateFlatModule
    from ssa_sim_v2.simulator.modules.conversions.conversions_base_module import ConversionsBinomialModule
    from ssa_sim_v2.simulator.modules.revenue.revenue_base_module import RevenueGammaNoiseModule
    from ssa_sim_v2.simulator.modules.competitive_cpc.competitive_cpc_base_module import CompetitiveCPCVickreyModule

    from ssa_sim_v2.simulator.modules.auctions.auctions_date_how_module import AuctionsDateHoWModule
    from ssa_sim_v2.simulator.modules.auction_attributes.auction_attributes_date_how_module import \
        AuctionAttributesDateHoWModule
    from ssa_sim_v2.simulator.modules.vickrey_auction.vickrey_auction_date_how_module import \
        VickreyAuctionDateHoWModule
    from ssa_sim_v2.simulator.modules.competitive_click_probability.competitive_click_probability_date_how_module import \
        CompetitiveClickProbabilityDateHoWModule
    from ssa_sim_v2.simulator.modules.competitive_clicks.competitive_clicks_date_how_module import \
        CompetitiveClicksDateHoWModule
    from ssa_sim_v2.simulator.modules.conversion_rate.conversion_rate_date_how_module import \
        ConversionRateDateHoWModule
    from ssa_sim_v2.simulator.modules.conversions.conversions_date_how_module import ConversionsDateHoWModule
    from ssa_sim_v2.simulator.modules.revenue.revenue_date_how_module import RevenueDateHoWModule
    from ssa_sim_v2.simulator.modules.competitive_cpc.competitive_cpc_date_how_module import \
        CompetitiveCpcDateHoWModule

    from ssa_sim_v2.simulator.competitive_date_how_simulator import CompetitiveDateHowSimulator
    from ssa_sim_v2.simulator.state import StateSet
    from ssa_sim_v2.simulator.action import ActionSet
    from ssa_sim_v2.simulator.attribute import AttrSet

    seed = 1111

    date_from = "2018-01-01"
    date_to = "2018-01-02"

    # Build a (date, hour_of_week) grid via a cross join on a dummy key.
    tmp_df = pd.DataFrame(np.array(range(24)), columns=["hour_of_day"])
    tmp_df["key"] = 1
    dates = pd.DataFrame(pd.date_range(date_from, date_to), columns=["date"])
    dates_list = dates["date"].tolist()
    dates["key"] = 1
    dates = pd.merge(dates, tmp_df, on=["key"], how="left")  # columns: ['date', 'hour_of_day']
    # hour_of_week = weekday * 24 + hour_of_day, in 0..167.
    dates["hour_of_week"] = pd.to_datetime(dates["date"]).dt.dayofweek * 24 + dates["hour_of_day"]
    dates["date"] = dates["date"].dt.strftime("%Y-%m-%d")
    dates = dates[["date", "hour_of_week"]]

    # Initialize state set
    state_set = StateSet(["date", "how"], ["discrete", "discrete"],
                         [dates_list, list(range(168))])

    # Initialize attribute set
    names = ['gender', 'age']
    vals = {'gender': ['M', 'F', 'U'],
            'age': ['0-19', '20-29', '30-39', '40-49', '50-59', '60-69', '70-*']}
    attr_set = AttrSet(names, vals)

    attr_combinations = attr_set.get_all_attr_tuples()

    # Initialize action set
    action_set = ActionSet(attr_set, max_bid=9.99, min_bid=0.01, max_mod=9.0, min_mod=0.1)

    def initialize_priors(params, base_class):
        # Assign the same prior params to every attribute combination for
        # every (date, hour_of_week) row.
        attr_combinations = list(attr_set.get_all_attr_tuples())
        priors = dates.copy()
        priors.loc[:, "prior"] = pd.Series([dict.fromkeys(attr_combinations, params)] * len(priors))

        base_classes = dates.copy()
        base_classes.loc[:, "base_class"] = base_class

        return priors, base_classes

    # Initialize auctions priors
    module_class = AuctionsPoissonModule
    Params = module_class.Params
    params = Params(auctions=100)
    priors = dates.copy()
    priors.loc[:, "prior"] = [{(): params}] * len(priors)
    base_classes = dates.copy()
    base_classes.loc[:, "base_class"] = module_class
    auctions_priors = priors
    auctions_base_classes = base_classes

    # Initialize auction_attributes priors
    module_class = AuctionAttributesModule
    Params = module_class.Params
    params = Params(p=1.0)  # Probabilities are normalized
    auction_attributes_priors, auction_attributes_base_classes \
        = initialize_priors(params, module_class)

    # Initialize vickrey_auction priors
    module_class = VickreyAuctionModule
    Params = module_class.Params
    params = Params()
    vickrey_auction_priors, vickrey_auction_base_classes \
        = initialize_priors(params, module_class)

    # Initialize competitive_click_probability priors
    module_class = CompetitiveClickProbabilityTwoClassGeometricModule
    Params = module_class.Params
    params = Params(n_pos=8, p=0.5, q=0.5, r_11=0.6, r_12=0.4, r_2=0.5)
    competitive_click_probability_priors, competitive_click_probability_base_classes \
        = initialize_priors(params, module_class)

    # Initialize competitive_clicks priors
    module_class = CompetitiveClicksBinomialModule
    Params = module_class.Params
    params = Params(noise_level=0.0, noise_type="multiplicative")
    competitive_clicks_priors, competitive_clicks_base_classes \
        = initialize_priors(params, module_class)

    # Initialize conversion_rate priors
    module_class = ConversionRateFlatModule
    Params = module_class.Params
    params = Params(cvr=0.02, noise_level=0.0, noise_type="multiplicative")
    conversion_rate_priors, conversion_rate_base_classes \
        = initialize_priors(params, module_class)

    # Initialize conversions priors
    module_class = ConversionsBinomialModule
    Params = module_class.Params
    params = Params(noise_level=0.0, noise_type="multiplicative")
    conversions_priors, conversions_base_classes \
        = initialize_priors(params, module_class)

    # Initialize revenue priors
    module_class = RevenueGammaNoiseModule
    Params = module_class.Params
    params = Params(avg_rpv=300.0, noise_level=100.0)
    revenue_priors, revenue_base_classes = initialize_priors(
        params, module_class)

    # Initialize competitive_cpc priors
    module_class = CompetitiveCPCVickreyModule
    Params = module_class.Params
    params = Params(n_pos=8, fee=0.01)
    competitive_cpc_priors, competitive_cpc_base_classes = \
        initialize_priors(params, module_class)

    # Module setup for the simulator
    mods = \
        {"auctions": AuctionsDateHoWModule(auctions_priors,
                                           auctions_base_classes,
                                           seed),
         "auction_attributes": AuctionAttributesDateHoWModule(auction_attributes_priors,
                                                              auction_attributes_base_classes,
                                                              seed),
         "vickrey_auction": VickreyAuctionDateHoWModule(vickrey_auction_priors,
                                                        vickrey_auction_base_classes,
                                                        seed),
         "competitive_click_probability": CompetitiveClickProbabilityDateHoWModule(
             competitive_click_probability_priors,
             competitive_click_probability_base_classes,
             seed),
         "competitive_clicks": CompetitiveClicksDateHoWModule(competitive_clicks_priors,
                                                              competitive_clicks_base_classes,
                                                              seed),
         "conversion_rate": ConversionRateDateHoWModule(conversion_rate_priors,
                                                        conversion_rate_base_classes,
                                                        seed),
         "conversions": ConversionsDateHoWModule(conversions_priors,
                                                 conversions_base_classes,
                                                 seed),
         "revenue": RevenueDateHoWModule(revenue_priors,
                                         revenue_base_classes,
                                         seed),
         "competitive_cpc": CompetitiveCpcDateHoWModule(competitive_cpc_priors,
                                                        competitive_cpc_base_classes,
                                                        seed)
         }

    simulator = CompetitiveDateHowSimulator(state_set, action_set, attr_set,
                                            mods, date_from, date_to, income_share=1.0)

    return simulator, state_set, action_set, attr_set
if __name__ == "__main__":
    """
    This script shows how the bidding policies will interact with the simulator
    The codes are written out for easier understanding and convenient debugging for your policies
    """

    # import policy classes from files
    from policy2019 import Policy2019
    from policy_thompson import PolicyThompsonSamplingSI

    # handy function that initializes all for you
    simulator, state_set, action_set, attr_set = simulator_setup_1day()

    # build "policies" list that contains all bidding policies
    policy1 = Policy2019(state_set, action_set, attr_set, seed=1234)  # this policy is a bare-bone sample policy that bids randomly without learning
    policy2 = PolicyThompsonSamplingSI(state_set, action_set, attr_set, seed=1234)
    policy2.initialize({"stp": {"cvr_default": 0.02, "rpv_default": 300.0}})  # this policy is one of production level policies that needs this extra step

    policies = []
    policies.append(policy1)
    policies.append(policy2)
    policies.append(Policy2019(state_set, action_set, attr_set, seed=9292))  # adding another policy2019 with different seed on-the-fly

    # Simulator will run 24 steps (t=0,1,...,23) (corresponding to 1 simulated day)
    T = 24  # note that this particular setup limits T up to 48. T>48 will cause an error.

    # Main loop: every policy acts on the shared state, then learns from its
    # own per-policy result.
    for t in range(T):
        s = simulator.state
        print("t={} of {}".format(t, T))
        print("  state={}".format(simulator.state))
        actions = []
        for p in policies:
            pol_action = p.act(s)  # each policy responds with a bid
            actions.append(pol_action)
        print("  Actions={}".format(actions))
        results = simulator.step(actions)
        for ix, p in enumerate(policies):
            p.learn(s, results[ix])  # each policy will learn with result
            # note that policy in index ix gets result in index ix. The results can be different
unittest_reinvent/running_modes/transfer_learning_tests/test_link_invent_transfer_learning.py | lilleswing/Reinvent-1 | 183 | 12799943 | import shutil
import unittest
import os
from running_modes.configurations import TransferLearningLoggerConfig, GeneralConfigurationEnvelope
from running_modes.configurations.transfer_learning.link_invent_learning_rate_configuration import \
LinkInventLearningRateConfiguration
from running_modes.configurations.transfer_learning.link_invent_transfer_learning_configuration import \
LinkInventTransferLearningConfiguration
from running_modes.constructors.transfer_learning_mode_constructor import TransferLearningModeConstructor
from running_modes.utils import set_default_device_cuda
from running_modes.enums.logging_mode_enum import LoggingModeEnum
from running_modes.enums.running_mode_enum import RunningModeEnum
from reinvent_models.model_factory.enums.model_type_enum import ModelTypeEnum
from unittest_reinvent.fixtures.paths import MAIN_TEST_PATH, SMILES_SET_LINK_INVENT_PATH, LINK_INVENT_PRIOR_PATH
from unittest_reinvent.fixtures.utils import count_empty_files
class TestLinkInventTransferLearning(unittest.TestCase):
    """Smoke tests for Link-INVENT transfer learning, with and without a validation set."""

    def setUp(self):
        set_default_device_cuda()
        logging_modes = LoggingModeEnum()
        run_modes = RunningModeEnum()
        model_types = ModelTypeEnum()
        self.workfolder = os.path.join(
            MAIN_TEST_PATH, model_types.LINK_INVENT + run_modes.TRANSFER_LEARNING)
        if not os.path.isdir(self.workfolder):
            os.makedirs(self.workfolder)
        self.log_dir = os.path.join(self.workfolder, "test_log")
        logger_config = TransferLearningLoggerConfig(
            logging_path=self.log_dir, recipient=logging_modes.LOCAL, job_name="test_job")
        self.lr_config = LinkInventLearningRateConfiguration()
        self.parameters = LinkInventTransferLearningConfiguration(
            empty_model=LINK_INVENT_PRIOR_PATH,
            output_path=self.workfolder,
            input_smiles_path=SMILES_SET_LINK_INVENT_PATH,
            validation_smiles_path=None,
            num_epochs=2,
            sample_size=10,
            learning_rate=self.lr_config)
        self.general_config = GeneralConfigurationEnvelope(
            model_type=model_types.LINK_INVENT,
            logging=vars(logger_config),
            run_type=run_modes.TRANSFER_LEARNING,
            version="3.0",
            parameters=vars(self.parameters))
        self.runner = TransferLearningModeConstructor(self.general_config)

    def tearDown(self):
        # Remove everything the run produced so tests stay independent.
        if os.path.isdir(self.workfolder):
            shutil.rmtree(self.workfolder)

    def _model_saved_and_logs_exist(self):
        """Assert the trained model file exists and the log dir has no empty files."""
        model_path = os.path.join(self.workfolder, self.parameters.model_file_name)
        self.assertTrue(os.path.isfile(model_path))
        self.assertTrue(os.path.isdir(self.log_dir))
        self.assertEqual(count_empty_files(self.log_dir), 0)

    def test_no_validation(self):
        self.parameters.validation_smiles_path = None
        self.runner.run()
        self._model_saved_and_logs_exist()

    def test_with_validation(self):
        self.parameters.validation_smiles_path = SMILES_SET_LINK_INVENT_PATH
        self.runner.run()
        self._model_saved_and_logs_exist()
| 1.34375 | 1 |
IMU/VTK-6.2.0/ThirdParty/Twisted/twisted/scripts/__init__.py | timkrentz/SunTracker | 4 | 12799959 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Subpackage containing the modules that implement the command line tools.
Note that these are imported by top-level scripts which are intended to be
invoked directly from a shell.
"""
from twisted.python.versions import Version
from twisted.python.deprecate import deprecatedModuleAttribute
deprecatedModuleAttribute(
Version("Twisted", 11, 1, 0),
"Seek unzipping software outside of Twisted.",
__name__,
"tkunzip")
deprecatedModuleAttribute(
Version("Twisted", 12, 1, 0),
"tapconvert has been deprecated.",
__name__,
"tapconvert")
del Version, deprecatedModuleAttribute
| 0.859375 | 1 |
CreateAccount.py | manijamali2003/Nava | 1 | 12799967 | #!/usr/bin/env python3
import os
import socket, random,hashlib
from Nava import *
HOST = '127.0.0.1' # Standard loopback interface address (localhost)
PORT = 65433 # Port to listen on (non-privileged ports are > 1023)
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind((HOST, PORT))
s.listen()
conn, addr = s.accept()
with conn:
print('Connected by', addr)
while True:
data = conn.recv(1024).decode('utf-8')
if data:
try:
split = data.split(',')
username = split[0] # manijamali2003
if not os.path.isfile (f'Etc/Users Account/{username}'):
fullname = split[1] # <NAME>
gender = split[2] # 0: Male, 1: Female
birthday = split[3] # yyyy/mm/dd
countryc = split[4] # IR
city = split[5] # Mashhad
zipcode = split[6] # 11111111
hashcode = split[7] # hash of password sha3_513
f = open(f'Etc/Users Account/{username}','wb')
f.write(f'{fullname},{gender},{birthday},{countryc},{city},{zipcode},{hashcode}'.encode())
f.close()
key = KeyCreator()
f = open(f'Etc/Users Account/Public Keys/{username}.pem','wb')
f.write(key.public) # you should create it
f.close()
conn.sendall(key.private)
conn.sendall(key.public)
else:
conn.sendall(b'e: account exists')
except:
conn.sendall(b'e: some errors') | 1.601563 | 2 |
src/tfchain/types/PrimitiveTypes.py | GlenDC/threefold-wallet-electron | 0 | 12799983 | import tfchain.errors as tferrors
import tfchain.polyfill.encoding.base64 as jsbase64
import tfchain.polyfill.encoding.hex as jshex
import tfchain.polyfill.encoding.str as jsstr
import tfchain.polyfill.encoding.decimal as jsdec
import tfchain.polyfill.array as jsarray
from tfchain.types.BaseDataType import BaseDataTypeClass
class BinaryData(BaseDataTypeClass):
    """
    BinaryData is the data type used for any binary data used in tfchain.

    A value is stored as bytes, can optionally be constrained to a fixed
    byte size, and is (de)serialized from/to str using one of the
    supported string encodings: 'hex' (default), 'base64' or 'hexprefix'.
    """

    def __init__(self, value=None, fixed_size=None, strencoding=None):
        """
        Create a BinaryData value.

        @param value: initial value (BinaryData, str, bytes or bytearray); None yields a zero-filled value
        @param fixed_size: None for variable size, 0 to lock the size to the initial value's length, >0 for an explicit byte size
        @param strencoding: 'hex' (default when None), 'base64' or 'hexprefix'
        """
        # define string encoding
        if strencoding != None and not isinstance(strencoding, str):
            raise TypeError(
                "strencoding should be None or a str, not be of type {}".format(strencoding))
        if strencoding == None or jsstr.String(strencoding).lower().strip().__eq__('hex'):
            self._from_str = lambda s: jshex.bytes_from_hex(s)
            self._to_str = lambda value: jshex.bytes_to_hex(value)
        elif jsstr.String(strencoding).lower().strip().__eq__('base64'):
            self._from_str = lambda s: jsbase64.bytes_from_b64(s)
            self._to_str = lambda value: jsbase64.bytes_to_b64(value)
        elif jsstr.String(strencoding).lower().strip().__eq__('hexprefix'):
            # hex with a mandatory '0x' prefix on output; the prefix is
            # tolerated (in either case) on input
            self._from_str = lambda s: jshex.bytes_from_hex(
                s[2:] if (s.startswith("0x") or s.startswith("0X")) else s)
            self._to_str = lambda value: '0x' + jshex.bytes_to_hex(value)
        else:
            raise TypeError(
                "{} is not a valid string encoding".format(strencoding))
        self._strencoding = strencoding
        # define fixed size
        if fixed_size != None:
            if not isinstance(fixed_size, int):
                raise TypeError(
                    "fixed size should be None or int, not be of type {}".format(type(fixed_size)))
            if fixed_size < 0:
                raise TypeError(
                    "fixed size should be at least 0, {} is not allowed".format(fixed_size))
            if fixed_size != 0:
                self._fixed_size = fixed_size
            else:
                self._fixed_size = None  # for now use no fixed size
        # define the value (finally)
        self._value = None
        self.value = value
        if fixed_size == 0:
            # define the fixed size now, if the fixed_size was 0
            # based on the binary length of the value
            self._fixed_size = len(self.value)

    @classmethod
    def from_json(cls, obj, fixed_size=None, strencoding=None):
        """Create a BinaryData from its JSON form (an encoded str, '' or None)."""
        if obj != None and not isinstance(obj, str):
            raise TypeError(
                "binary data is expected to be an encoded string when part of a JSON object")
        if obj == '':
            obj = None
        return cls(value=obj, fixed_size=fixed_size, strencoding=strencoding)

    @property
    def value(self):
        # the raw bytes value
        return self._value

    @value.setter
    def value(self, value):
        # normalize the value
        if isinstance(value, BinaryData):
            value = value.value
        elif value == None:
            # None becomes a zero-filled value of the fixed size (or empty)
            if self._fixed_size != None:
                value = bytes(jsarray.new_array(self._fixed_size))
            else:
                value = bytes(jsarray.new_array(0))
        elif isinstance(value, str):
            # decode using the configured string encoding
            value = self._from_str(value)
        elif isinstance(value, bytearray):
            value = bytes(value)
        elif not isinstance(value, bytes) and not jsarray.is_uint8_array(value):
            raise TypeError(
                "binary data can only be set to a BinaryData, str, bytes or bytearray, not {}".format(type(value)))
        # if fixed size, check this now
        # (a zero-length value is always accepted, e.g. as a placeholder)
        lvalue = len(value)
        if self._fixed_size != None and lvalue != 0 and lvalue != self._fixed_size:
            raise ValueError(
                "binary data was expected to be of fixed size {}, length {} is not allowed".format(
                    self._fixed_size, len(value)))
        # all good, assign the bytearray value
        self._value = value

    def __len__(self):
        return len(self.value)

    def __str__(self):
        # render using the configured string encoding
        return self._to_str(self._value)

    def str(self):
        return self.__str__()

    def __repr__(self):
        return self.__str__()

    def json(self):
        # JSON form is simply the encoded string
        return self.__str__()

    def __eq__(self, other):
        other = self._op_other_as_binary_data(other)
        return self.value == other.value

    def __ne__(self, other):
        other = self._op_other_as_binary_data(other)
        return self.value != other.value

    def _op_other_as_binary_data(self, other):
        """Coerce the comparison operand to a compatible BinaryData or raise TypeError."""
        if isinstance(other, (str, bytes, bytearray)):
            other = BinaryData(
                value=other, fixed_size=self._fixed_size, strencoding=self._strencoding)
        elif not isinstance(other, BinaryData):
            raise TypeError(
                "Binary data of type {} is not supported".format(type(other)))
        if self._fixed_size != other._fixed_size:
            raise TypeError(
                "Cannot compare binary data with different fixed size: self({}) != other({})".format(
                    self._fixed_size, other._fixed_size))
        if self._strencoding != other._strencoding:
            raise TypeError(
                "Cannot compare binary data with different strencoding: self({}) != other({})".format(
                    self._strencoding, other._strencoding))
        return other

    def __hash__(self):
        # hash the string form so equal values hash equally
        return hash(self.__str__())

    def sia_binary_encode(self, encoder):
        """
        Encode this binary data according to the Sia Binary Encoding format.
        Either encoded as a slice or an array, depending on whether or not it is fixed sized.
        """
        if self._fixed_size == None:
            encoder.add_slice(self._value)
        else:
            encoder.add_array(self._value)

    def rivine_binary_encode(self, encoder):
        """
        Encode this binary data according to the Rivine Binary Encoding format.
        Either encoded as a slice or an array, depending on whether or not it is fixed sized.
        """
        if self._fixed_size == None:
            encoder.add_slice(self._value)
        else:
            encoder.add_array(self._value)
class Hash(BinaryData):
    SIZE = 32
    """
    TFChain Hash Object, a special type of BinaryData
    """

    def __init__(self, value=None):
        # a hash is always 32 bytes, hex-encoded
        super().__init__(value, fixed_size=Hash.SIZE, strencoding='hex')

    @classmethod
    def from_json(cls, obj):
        """Create a Hash from its JSON form (a hex string, '' or None)."""
        if obj != None and not isinstance(obj, str):
            raise TypeError(
                "hash is expected to be an encoded string when part of a JSON object, not {}".format(type(obj)))
        return cls(value=None if obj == '' else obj)

    def __str__(self):
        encoded = super().__str__()
        # a nil hash encodes to an empty string upstream; render it as the
        # canonical run of 64 zero characters instead
        if jsstr.isempty(encoded):
            return jsstr.repeat('0', Hash.SIZE * 2)
        return encoded
class Currency(BaseDataTypeClass):
    """
    TFChain Currency Object.

    Wraps a (polyfill) Decimal value with at most 9 digits of precision
    after the comma (1 TFT == 10^9 of the lowest unit). Arithmetic and
    comparison operators are overloaded and coerce their operand to a
    Currency first.
    """

    def __init__(self, value=None):
        self._value = None
        self.value = value

    @classmethod
    def sum(cls, *values):
        """Return the sum of all given values as a new Currency."""
        s = cls()
        for value in values:
            s.__iadd__(value)
        return s

    @classmethod
    def from_str(cls, obj, lowest_unit=False):
        """
        Create a Currency from a string.

        @param lowest_unit: if True, interpret the string as an amount in
            the lowest unit (10^-9 TFT) rather than in TFT
        """
        if obj != None and not isinstance(obj, str):
            raise TypeError(
                "currency is expected to be a string , not type {}".format(type(obj)))
        if obj == '':
            obj = None
        c = cls()
        c.value = jsdec.Decimal(obj)
        if lowest_unit:
            # NOTE(review): relies on the jsdec.Decimal.__imul__ polyfill
            # mutating the wrapped value in place — confirm against jsdec.
            c.value.__imul__(jsdec.Decimal('0.000000001'))
        return c

    @classmethod
    def from_json(_, obj):
        # JSON values are expressed in the lowest unit
        return Currency.from_str(obj, lowest_unit=True)

    @property
    def value(self):
        # the underlying jsdec.Decimal value
        return self._value

    # named aliases for the overloaded operators (useful from JS-side code)

    def plus(self, other):
        return self.__add__(other)

    def minus(self, other):
        return self.__sub__(other)

    def times(self, other):
        return self.__mul__(other)

    def divided_by(self, other):
        return self.__truediv__(other)

    def equal_to(self, other):
        return self.__eq__(other)

    def not_equal_to(self, other):
        return self.__ne__(other)

    def less_than(self, other):
        return self.__lt__(other)

    def greater_than(self, other):
        return self.__gt__(other)

    def less_than_or_equal_to(self, other):
        return self.__le__(other)

    def greater_than_or_equal_to(self, other):
        return self.__ge__(other)

    def negate(self):
        return Currency(self.value.negate())

    @value.setter
    def value(self, value):
        """
        Assign the value from None, another Currency, an int, a str
        (optionally suffixed with 'TFT') or a jsdec.Decimal.

        @raises CurrencyPrecisionOverflow: if more than 9 decimals are given
        """
        if value == None:
            self._value = jsdec.Decimal()
            return
        if isinstance(value, Currency):
            self._value = value.value
            return
        if isinstance(value, (int, str, jsdec.Decimal)):
            inner_value = value
            if isinstance(inner_value, str):
                # strip an optional 'TFT' unit suffix (case-insensitive)
                inner_value = jsstr.String(inner_value).upper().strip().value
                if len(inner_value) >= 4 and inner_value[-3:] == 'TFT':
                    inner_value = jsstr.rstrip(inner_value[:-3])
            d = jsdec.Decimal(inner_value)
            _, _, exp = d.as_tuple()  # sign is first return value
            if exp < -9:
                # more precision than the lowest unit supports
                raise tferrors.CurrencyPrecisionOverflow(d.__str__())
            # if sign != 0:  # allow negative values for intermediate computations
            #     raise tferrors.CurrencyNegativeValue(d.__str__())
            self._value = d
            return
        raise TypeError(
            "cannot set value of type {} as Currency (invalid type): {}".format(type(value), value))

    # operator overloading to allow currencies to be summed
    def __add__(self, other):
        if not isinstance(other, Currency):
            return self.__add__(Currency(other))
        return Currency(self.value.__add__(other.value))

    def __radd__(self, other):
        return self.__add__(other)

    def __iadd__(self, other):
        if not isinstance(other, Currency):
            return self.__iadd__(Currency(other))
        self._value.__iadd__(other.value)
        return self

    # operator overloading to allow currencies to be multiplied
    def __mul__(self, other):
        if not isinstance(other, Currency):
            return self.__mul__(Currency(other))
        # round the product back to the 9-decimal precision
        return Currency(self.value.__mul__(other.value).to_nearest(9))

    def __rmul__(self, other):
        return self.__mul__(other)

    def __imul__(self, other):
        if not isinstance(other, Currency):
            return self.__imul__(Currency(other))
        self._value.__imul__(other.value)
        return self

    # operator overloading to allow currencies to be divided
    def __truediv__(self, other):
        if not isinstance(other, Currency):
            return self.__truediv__(Currency(other))
        return Currency(self.value.__truediv__(other.value).to_nearest(9))

    # operator overloading to allow currencies to be subtracted
    def __sub__(self, other):
        if not isinstance(other, Currency):
            return self.__sub__(Currency(other))
        return Currency(self.value.__sub__(other.value))

    def __rsub__(self, other):
        return self.__sub__(other)

    def __isub__(self, other):
        if not isinstance(other, Currency):
            return self.__isub__(Currency(other))
        self._value.__isub__(other.value)
        return self

    # operator overloading to allow currencies to be compared
    def __lt__(self, other):
        if not isinstance(other, Currency):
            return self.__lt__(Currency(other))
        return self.value.__lt__(other.value)

    def __le__(self, other):
        if not isinstance(other, Currency):
            return self.__le__(Currency(other))
        return self.value.__le__(other.value)

    def __eq__(self, other):
        if not isinstance(other, Currency):
            return self.__eq__(Currency(other))
        return self.value.__eq__(other.value)

    def __ne__(self, other):
        if not isinstance(other, Currency):
            return self.__ne__(Currency(other))
        return self.value.__ne__(other.value)

    def __gt__(self, other):
        if not isinstance(other, Currency):
            return self.__gt__(Currency(other))
        return self.value.__gt__(other.value)

    def __ge__(self, other):
        if not isinstance(other, Currency):
            return self.__ge__(Currency(other))
        return self.value.__ge__(other.value)

    @staticmethod
    def _op_other_as_currency(other):
        """Coerce an operand to Currency, or raise TypeError for unsupported types."""
        if isinstance(other, (int, str)):
            other = Currency(value=other)
        elif isinstance(other, float):
            # go through str to avoid binary-float representation noise
            other = Currency(value=jsdec.Decimal(str(other)))
        elif not isinstance(other, Currency):
            raise TypeError(
                "currency of type {} is not supported".format(type(other)))
        return other

    # allow our currency to be turned into an int
    def __int__(self):
        # integer amount expressed in the lowest unit (10^-9 TFT)
        return jsstr.to_int(self.str(lowest_unit=True))

    def bytes(self):
        return self.value.bytes(prec=9)

    def __str__(self):
        return self.str()

    def str(self, with_unit=False, lowest_unit=False, precision=9):
        """
        Turn this Currency value into a str TFT unit-based value,
        optionally with the currency notation.

        @param with_unit: include the TFT currency suffix unit with the str
        @param lowest_unit: render the amount in the lowest unit (no decimal point)
        @param precision: number of decimals to render (max 9)
        """
        s = self.value.str(precision)
        if lowest_unit:
            # drop the decimal point and leading zeroes
            s = jsstr.lstrip(jsstr.replace(s, ".", ""), "0")
        elif jsstr.contains(s, "."):
            # trim trailing zeroes and a dangling decimal point
            s = jsstr.rstrip(jsstr.rstrip(s, "0 "), '.')
        if jsstr.isempty(s):
            s = "0"
        if with_unit:
            s += " TFT"
        return s

    def __repr__(self):
        return self.str(with_unit=True)

    def json(self):
        # JSON form is the amount in the lowest unit
        return self.str(lowest_unit=True)

    def sia_binary_encode(self, encoder):
        """
        Encode this currency according to the Sia Binary Encoding format.
        """
        b = self.bytes()
        encoder.add_int(len(b))
        encoder.add_array(b)

    def rivine_binary_encode(self, encoder):
        """
        Encode this currency according to the Rivine Binary Encoding format.
        """
        b = self.bytes()
        encoder.add_slice(b)
class Blockstake(BaseDataTypeClass):
    """
    TFChain Blockstake Object.

    A thin wrapper around Currency representing an integral amount of
    block stakes rather than a coin value.
    """

    def __init__(self, value=None):
        # anything Currency accepts (None, int, str, Currency) is valid
        self._value = Currency(value)

    @classmethod
    def from_json(cls, obj):
        """
        Create a Blockstake from its JSON form (a string, '' or None).

        @raises TypeError: if obj is neither None nor a str
        """
        if obj != None and not isinstance(obj, str):
            raise TypeError(
                "block stake is expected to be a string when part of a JSON object, not type {}".format(type(obj)))
        if obj == '':
            obj = None
        return cls(value=obj)

    @property
    def value(self):
        # the underlying Currency amount
        return self._value

    @value.setter
    def value(self, value):
        # BUGFIX: the original assigned to ``value._value`` (mutating the
        # argument) instead of ``self._value``, so the setter never updated
        # the instance.
        self._value = Currency(value=value)

    # allow our block stake to be turned into an int
    def __int__(self):
        return jsstr.to_int(self.value.str(lowest_unit=False))

    def str(self):
        return jsstr.from_int(self.__int__())

    def __str__(self):
        return self.str()

    def __repr__(self):
        return self.__str__()

    def json(self):
        return self.__str__()

    def bytes(self):
        return self.value.bytes()

    def sia_binary_encode(self, encoder):
        """
        Encode this block stake (==Currency) according to the Sia Binary Encoding format.
        """
        b = self.bytes()
        encoder.add_int(len(b))
        encoder.add_array(b)

    def rivine_binary_encode(self, encoder):
        """
        Encode this block stake (==Currency) according to the Rivine Binary Encoding format.
        """
        b = self.bytes()
        encoder.add_slice(b)
| 1.75 | 2 |
src/drone_control/mavros/sender.py | Adrien4193/drone_control | 0 | 12799991 | import rospy
from tf.transformations import quaternion_from_euler
from geometry_msgs.msg import Point, PoseStamped, Quaternion
from mavros_msgs.msg import Thrust
class Sender(object):
    """Publishes MAVROS setpoint (position/attitude/velocity), thrust and mocap messages."""

    def __init__(self):
        # one publisher per topic, keyed by a short logical name;
        # creation order matches the original implementation
        self._pubs = {}
        for name, topic, msg_type in (
                ('position', '/mavros/setpoint_position/local', PoseStamped),
                ('attitude', '/mavros/setpoint_attitude/attitude', PoseStamped),
                ('velocity', '/mavros/setpoint_attitude/velocity', PoseStamped),
                ('thrust', '/mavros/setpoint_attitude/thrust', Thrust),
                ('mocap', '/mavros/mocap/pose', PoseStamped)):
            self._pubs[name] = rospy.Publisher(topic, msg_type, queue_size=10)

    def __del__(self):
        for pub in self._pubs.values():
            # unregistering on teardown is intentionally disabled
            pass  # pub.unregister()

    def send_attitude(self, attitude):
        """Publish an attitude setpoint together with its thrust value."""
        message = attitude.get_message()
        self._pubs['attitude'].publish(message)
        self._pubs['thrust'].publish(Thrust(thrust=attitude.thrust))

    def send_velocity(self, attitude):
        """Publish a velocity setpoint together with its thrust value."""
        message = attitude.get_message()
        self._pubs['velocity'].publish(message)
        self._pubs['thrust'].publish(Thrust(thrust=attitude.thrust))

    def send_position(self, pose):
        """Publish a local position setpoint."""
        self._pubs['position'].publish(pose.get_message())

    def send_mocap(self, pose):
        """Publish a motion-capture pose."""
        self._pubs['mocap'].publish(pose.get_message())
| 1.664063 | 2 |
hub/tests/test_models_custom_link.py | yevgenykuz/dev-team-hub | 2 | 12799999 | from django.test import TestCase
from ..models import CustomLink
class CustomLinkModelTests(TestCase):
    """Unit tests for the CustomLink model's fields, ordering and str()."""

    def setUp(self):
        self.custom_link = CustomLink.objects.create(name='Google', url='https://www.google.com', order_id=1)

    def test_new_object(self):
        """A freshly created link keeps the values it was created with."""
        # assertEquals is a deprecated alias of assertEqual; use the
        # canonical name throughout.
        self.assertEqual(self.custom_link.name, 'Google')
        self.assertEqual(self.custom_link.url, 'https://www.google.com')
        self.assertEqual(self.custom_link.order_id, 1)

    def test_field_name(self):
        """name is labelled 'name', capped at 50 characters and unique."""
        field = self.custom_link._meta.get_field('name')
        self.assertEqual(field.verbose_name, 'name')
        self.assertEqual(field.max_length, 50)
        self.assertEqual(field.unique, True)

    def test_field_url(self):
        """url is labelled 'url' and capped at 255 characters."""
        field = self.custom_link._meta.get_field('url')
        self.assertEqual(field.verbose_name, 'url')
        self.assertEqual(field.max_length, 255)

    def test_field_order_id(self):
        """order_id is labelled 'order id' and defaults to 0."""
        field = self.custom_link._meta.get_field('order_id')
        self.assertEqual(field.verbose_name, 'order id')
        # the original bound this to a variable misleadingly named
        # ``max_length``; it is the field's default that is checked here
        self.assertEqual(field.default, 0)

    def test_object_presentation(self):
        """str() renders as '[name]: url'."""
        expected_presentation = f"[{self.custom_link.name}]: {self.custom_link.url}"
        self.assertEqual(expected_presentation, str(self.custom_link))

    def test_object_ordering(self):
        """Default queryset ordering is by order_id."""
        self.assertEqual(self.custom_link._meta.ordering, ['order_id'])
| 1.671875 | 2 |