Columns (with observed value ranges):
    max_stars_repo_path    string  (length 4 to 277)
    max_stars_repo_name    string  (length 4 to 130)
    max_stars_count        int64   (0 to 191k)
    id                     string  (length 1 to 8)
    content                string  (length 1 to 996k)
    score                  float64 (-1.25 to 4.06)
    int_score              int64   (0 to 4)
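The rows below are sample records. As a rough sketch of how a table with this schema might be filtered on the quality columns (the file name, the parquet format, and the reading of int_score as the rounded score bucket are assumptions, not part of the dump):

import pandas as pd

# Hypothetical export of the table shown below; the dump itself does not name a file.
df = pd.read_parquet("scored_python_files.parquet")

# `score` is a continuous quality score; `int_score` appears to be its 0-4 integer bucket.
high_quality = df[df["int_score"] >= 3]
print(high_quality[["max_stars_repo_name", "max_stars_repo_path", "score"]].head())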
tests/pydecompile-test/baselines/events_in_code_blocks.py
gengxf0505/pxt
1
1752
def function_0():
    basic.showNumber(7)
basic.forever(function_0)
0.554688
1
code-wars/010.moving-zeros-to-the-end.py
code-knayam/DataStructureAlgorithms
0
1824
# Write an algorithm that takes an array and moves all of the zeros to the end,
# preserving the order of the other elements.


def move_zeros(array):
    # your code here
    new_array = []
    new_index = 0
    while len(array) > 0:
        item = array.pop(0)
        if item == 0 and not type(item) == bool:
            new_array.append(item)
        else:
            new_array.insert(new_index, item)
            new_index = new_index + 1
    return new_array
2.328125
2
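A quick check of the move_zeros routine from the record above (illustrative usage only, not part of the dataset row); note that booleans are deliberately excluded from the zero test:

print(move_zeros([0, 1, 0, 3, 12]))   # [1, 3, 12, 0, 0]
print(move_zeros([False, 1, 0, 2]))   # [False, 1, 2, 0]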
libbeat/tests/system/idxmgmt.py
dddpaul/beats
4
1832
import datetime
import unittest

import pytest
from elasticsearch import NotFoundError


class IdxMgmt(unittest.TestCase):

    def __init__(self, client, index):
        self._client = client
        self._index = index if index != '' and index != '*' else 'mockbeat'

    def needs_init(self, s):
        return s == '' or s == '*'

    def delete(self, indices=[], policies=[]):
        indices = list([x for x in indices if x != ''])
        if not indices:
            # fall back to the default index when nothing was passed in
            indices = [self._index]
        for i in indices:
            self.delete_index_and_alias(i)
            self.delete_template(template=i)
        for i in [x for x in policies if x != '']:
            self.delete_policy(i)

    def delete_index_and_alias(self, index=""):
        if self.needs_init(index):
            index = self._index
        try:
            self._client.transport.perform_request('DELETE', "/" + index + "*")
        except NotFoundError:
            pass

    def delete_template(self, template=""):
        if self.needs_init(template):
            template = self._index
        try:
            self._client.transport.perform_request('DELETE', "/_template/" + template + "*")
        except NotFoundError:
            pass

    def delete_policy(self, policy):
        # Delete any existing policy starting with given policy
        policies = self._client.transport.perform_request('GET', "/_ilm/policy")
        for p, _ in policies.items():
            if not p.startswith(policy):
                continue
            try:
                self._client.transport.perform_request('DELETE', "/_ilm/policy/" + p)
            except NotFoundError:
                pass

    def assert_index_template_not_loaded(self, template):
        with pytest.raises(NotFoundError):
            self._client.transport.perform_request('GET', '/_template/' + template)

    def assert_index_template_loaded(self, template):
        resp = self._client.transport.perform_request('GET', '/_template/' + template)
        assert template in resp
        assert "lifecycle" not in resp[template]["settings"]["index"]

    def assert_ilm_template_loaded(self, template, policy, alias):
        resp = self._client.transport.perform_request('GET', '/_template/' + template)
        assert resp[template]["settings"]["index"]["lifecycle"]["name"] == policy
        assert resp[template]["settings"]["index"]["lifecycle"]["rollover_alias"] == alias

    def assert_index_template_index_pattern(self, template, index_pattern):
        resp = self._client.transport.perform_request('GET', '/_template/' + template)
        assert template in resp
        assert resp[template]["index_patterns"] == index_pattern

    def assert_alias_not_created(self, alias):
        resp = self._client.transport.perform_request('GET', '/_alias')
        for name, entry in resp.items():
            if alias not in name:
                continue
            assert entry["aliases"] == {}, entry["aliases"]

    def assert_alias_created(self, alias, pattern=None):
        if pattern is None:
            pattern = self.default_pattern()
        name = alias + "-" + pattern
        resp = self._client.transport.perform_request('GET', '/_alias/' + alias)
        assert name in resp
        assert resp[name]["aliases"][alias]["is_write_index"] == True

    def assert_policy_not_created(self, policy):
        with pytest.raises(NotFoundError):
            self._client.transport.perform_request('GET', '/_ilm/policy/' + policy)

    def assert_policy_created(self, policy):
        resp = self._client.transport.perform_request('GET', '/_ilm/policy/' + policy)
        assert policy in resp
        assert resp[policy]["policy"]["phases"]["hot"]["actions"]["rollover"]["max_size"] == "50gb"
        assert resp[policy]["policy"]["phases"]["hot"]["actions"]["rollover"]["max_age"] == "30d"

    def assert_docs_written_to_alias(self, alias, pattern=None):
        # Refresh the indices to guarantee all documents are available
        # through the _search API.
        self._client.transport.perform_request('POST', '/_refresh')
        if pattern is None:
            pattern = self.default_pattern()
        name = alias + "-" + pattern
        data = self._client.transport.perform_request('GET', '/' + name + '/_search')
        self.assertGreater(data["hits"]["total"]["value"], 0)

    def default_pattern(self):
        d = datetime.datetime.now().strftime("%Y.%m.%d")
        return d + "-000001"

    def index_for(self, alias, pattern=None):
        if pattern is None:
            pattern = self.default_pattern()
        return "{}-{}".format(alias, pattern)
1.625
2
src/data_preprocess.py
QinganZhao/ML-based-driving-motion-prediction
18
1840
import numpy as np import matplotlib.pyplot as plt import matplotlib as mpl import matplotlib.patches as patches def load_data(file_name, car_flag): if car_flag == 1: data = np.loadtxt('./car1/'+str(file_name)) elif car_flag == 2: data = np.loadtxt('./car2/'+str(file_name)) return data def get_low_freq_data(data): """ Return a data matrix with 0.1s per time step data. (from 0.01s data) """ matrix = np.zeros((1, data.shape[1])) for i in range(data.shape[0]): if i % 10 == 0: matrix = np.concatenate((matrix, data[i,:].reshape(1,data.shape[1])),axis=0) return matrix[1:,:] def data_process(): """ This function serves to concatenate the information of two cars into one array. Note: car1 -- mainlane car; car2 -- merging car; OutFormat: 0 case_ID 1 frame_ID 2 car1_long_pos 3 car1_long_vel 4 car1_lateral_pos 5 car1_lateral_displacement 6 car2_long_pos 7 car2_long_vel 8 car2_lateral_pos 9 car2_lateral_displacement 10 relative_long_vel (merge - mainlane) 11 relative_lateral_distance (merge - mainlane) 12 relative_long_distance (merge - mainlane) 13 car1_yaw 14 car2_yaw 15 situation label: (0: car1 yields car2; 1: car2 yields car1) """ data_matrix = np.zeros((1,16)) for i in range(128): file_name_1 = 'data_'+str(i)+'_1.txt' file_name_2 = 'data_'+str(i)+'_2.txt' car1 = get_low_freq_data(load_data(file_name_1, 1)) car2 = get_low_freq_data(load_data(file_name_2, 2)) T = int(car1.shape[0]) #print(T) current_data_matrix = np.zeros((T,16)) for j in range(1, T): current_data_matrix[j,0] = i current_data_matrix[j,1] = j current_data_matrix[j,2] = car1[j,1] current_data_matrix[j,3] = 10 * (car1[j,1] - car1[j-1,1]) current_data_matrix[j,4] = car1[j,2] current_data_matrix[j,5] = car1[j,2] - car1[j-1,2] current_data_matrix[j,6] = car2[j,1] current_data_matrix[j,7] = 10 * (car2[j,1] - car2[j-1,1]) current_data_matrix[j,8] = car2[j,2] current_data_matrix[j,9] = car2[j,2] - car2[j-1,2] current_data_matrix[j,10] = current_data_matrix[j,7] - current_data_matrix[j,3] current_data_matrix[j,11] = current_data_matrix[j,8] - current_data_matrix[j,4] current_data_matrix[j,12] = current_data_matrix[j,6] - current_data_matrix[j,2] current_data_matrix[j,13] = car1[j,3] current_data_matrix[j,14] = car2[j,3] if car1[-1,1] > car2[-1,1]: current_data_matrix[j,15] = 1 else: current_data_matrix[j,15] = 0 current_data_matrix = current_data_matrix[1:, :] data_matrix = np.concatenate((data_matrix, current_data_matrix),axis=0) np.savetxt('./data_matrix.txt', data_matrix[1:,:],'%.4f') ################################################################## def divide_data(data_matrix, segment_length): """ This function serves to separate two situation cases. 
""" situation0_data = data_matrix[np.where(data_matrix[:,-1] == 0)] situation1_data = data_matrix[np.where(data_matrix[:,-1] == 1)] np.savetxt('./all_trajs_1.txt', situation0_data, '%.4f') np.savetxt('./all_trajs_2.txt', situation1_data, '%.4f') # count seq lengths # separate sequence segments # all_trajs_seg_1 = np.zeros((1, data_matrix.shape[1])) # all_trajs_seg_2 = np.zeros((1, data_matrix.shape[1])) all_trajs_1 = np.zeros((1, data_matrix.shape[1])) all_trajs_2 = np.zeros((1, data_matrix.shape[1])) count0, count1 = [], [] # for i in range(128): # print('i = '+str(i)) # temp_data = data_matrix[np.where(data_matrix[:,0] == i)] # if temp_data[0,-1] == 0: # for j in range(temp_data.shape[0]-segment_length+1): # temp_seg_data = temp_data[j:j+segment_length, :] # count0.append(temp_seg_data.shape[0]) # all_trajs_seg_1 = np.concatenate((all_trajs_seg_1, temp_seg_data),axis=0) # else: # for j in range(temp_data.shape[0]-segment_length+1): # temp_seg_data = temp_data[j:j+segment_length, :] # count1.append(temp_seg_data.shape[0]) # all_trajs_seg_2 = np.concatenate((all_trajs_seg_2, temp_seg_data),axis=0) for i in range(128): print('i = '+str(i)) temp_data = data_matrix[np.where(data_matrix[:,0] == i)] if temp_data[0,-1] == 0: count0.append(temp_data.shape[0]) all_trajs_1 = np.concatenate((all_trajs_1, temp_data),axis=0) elif temp_data[0,-1] == 1: count1.append(temp_data.shape[0]) all_trajs_2 = np.concatenate((all_trajs_2, temp_data),axis=0) print(all_trajs_1.shape) print(all_trajs_2.shape) print(sum(count0)) print(sum(count1)) # np.savetxt('./all_trajs_seg_1.txt', all_trajs_seg_1[1:,:], '%.4f') # np.savetxt('./all_trajs_seg_2.txt', all_trajs_seg_2[1:,:], '%.4f') np.savetxt('./all_trajs_seq_length_1.txt', np.array(count0), '%d') np.savetxt('./all_trajs_seq_length_2.txt', np.array(count1), '%d') #data_process() #data_matrix = np.loadtxt('./data_matrix.txt') #divide_data(data_matrix=data_matrix, segment_length=30) ############################################### def check_data(): data = np.loadtxt('../simulation_data/data_matrix.txt') temp_data = data[np.where(data[:,0]==69)] T = temp_data.shape[0] car1_long_vel = temp_data[:,3] car2_long_vel = temp_data[:,7] car1_acc = 10*(temp_data[1:,3]-temp_data[:-1,3]) car2_acc = 10*(temp_data[1:,7]-temp_data[:-1,7]) # plt.figure(1) # plt.plot(range(T-1), car1_acc, c='b', label='main lane car acceleration') # plt.plot(range(T-1), car2_acc, c='r', label='merging car acceleration') # plt.legend() plt.figure(2,figsize=(14,4)) plt.plot(range(T), car1_long_vel, c='b', label='main lane car velocity') plt.plot(range(T), car2_long_vel, c='r', label='merging car velocity') plt.legend() plt.savefig('./long_vel_69.eps', bbox_inches='tight') #plt.show() #check_data() ############################################### def plot_vehicles(case_id, data_matrix): """ This function is to plot vehicle trajectories with bounding boxes. 
""" current_case_data = data_matrix[np.where(data_matrix[:,0]==case_id)] T = current_case_data.shape[0] fig = plt.figure(figsize=(20,2)) for i in range(T): if i<10: name='00'+str(i) elif i>=10 and i<100: name = '0'+str(i) elif i>=100: name = str(i) ax = fig.add_subplot(111, aspect='equal') ax.add_patch( patches.Rectangle( (current_case_data[i,2]-2.0, current_case_data[i,4]-0.9), # (x,y) 4.0, # width 1.8, # height alpha = 0.3 + 0.7*(T-i) / float(T), facecolor='blue', edgecolor='black', linewidth=0.5 ) ) ax.add_patch( patches.Rectangle( (current_case_data[i,6]-2.0, current_case_data[i,8]-0.9), # (x,y) 4.0, # width 1.8, # height alpha = 0.3 + 0.7*(T-i) / float(T), facecolor='red', edgecolor='black', linewidth=0.5 ) ) ax.plot(range(-805,-360),-605*np.ones(445), color='k',linewidth=1) ax.plot(range(-805,-584),-610*np.ones(221), color='k',linewidth=1) ax.plot(range(-445,-360),-610*np.ones(85), color='k',linewidth=1) x = [[-584,-805],[-445,-805]] y = [[-610,-618],[-610,-622]] for l in range(len(x)): ax.plot(x[l], y[l], color='k',linewidth=1) ax.set_xlim(-680, -400) ax.set_ylim(-620, -600) ax.set_xticks([]) ax.set_yticks([]) fig.savefig('./vehicles_plot/'+str(case_id)+'_'+str(name)+'.png', bbox_inches='tight') data_matrix = np.loadtxt('./data_matrix.txt') plot_vehicles(case_id=8, data_matrix=data_matrix)
2.40625
2
engine_wrapper.py
lidevelopers/Lishogi-Bot-1
0
1856
import os import shogi import backoff import subprocess from util import * import logging logger = logging.getLogger(__name__) import engine_ctrl @backoff.on_exception(backoff.expo, BaseException, max_time=120) def create_engine(config, board): cfg = config["engine"] engine_path = os.path.realpath(os.path.join(cfg["dir"], cfg["name"])) engine_type = cfg.get("protocol") engine_options = cfg.get("engine_options") commands = [engine_path] if engine_options: for k, v in engine_options.items(): commands.append("--{}={}".format(k, v)) silence_stderr = cfg.get("silence_stderr", False) return USIEngine(board, commands, cfg.get("usi_options", {}), cfg.get("go_commands", {}), silence_stderr) class EngineWrapper: def __init__(self, board, commands, options=None, silence_stderr=False): pass def search_for(self, board, movetime): pass def first_search(self, board, movetime): pass def search(self, game, board, btime, wtime, binc, winc): pass def print_stats(self): pass def get_opponent_info(self, game): pass def name(self): return self.engine.name def report_game_result(self, game, board): pass def quit(self): self.engine.kill_process() def print_handler_stats(self): pass def get_handler_stats(self): pass class USIEngine(EngineWrapper): def __init__(self, board, commands, options, go_commands={}, silence_stderr=False): commands = commands[0] if len(commands) == 1 else commands self.go_commands = go_commands self.engine = engine_ctrl.Engine(commands) self.engine.usi() if options: for name, value in options.items(): self.engine.setoption(name, value) self.engine.isready() def first_search(self, board, movetime): best_move, _ = self.engine.go(board.sfen(), "", movetime=movetime) return best_move def search_with_ponder(self, game, board, btime, wtime, binc, winc, byo, ponder=False): moves = [m.usi() for m in list(board.move_stack)] cmds = self.go_commands if len(cmds) > 0: best_move, ponder_move = self.engine.go( game.initial_fen, moves, nodes=cmds.get("nodes"), depth=cmds.get("depth"), movetime=cmds.get("movetime"), ponder=ponder ) else: best_move, ponder_move = self.engine.go( game.initial_fen, moves, btime=btime, wtime=wtime, binc=binc, winc=winc, byo=byo, ponder=ponder ) return (best_move, ponder_move) def search(self, game, board, btime, wtime, binc, winc): cmds = self.go_commands moves = [m.usi() for m in list(board.move_stack)] best_move, _ = self.engine.go( game.initial_fen, moves, btime=btime, wtime=wtime, binc=binc, winc=winc, depth=cmds.get("depth"), nodes=cmds.get("nodes"), movetime=cmds.get("movetime") ) return best_move def stop(self): self.engine.kill_process() def print_stats(self, stats=None): if stats is None: stats = ['score', 'depth', 'nodes', 'nps'] info = self.engine.info for stat in stats: if stat in info: logger.info("{}: {}".format(stat, info[stat])) def get_stats(self, stats=None): if stats is None: stats = ['score', 'depth', 'nodes', 'nps'] info = self.engine.info stats_str = [] for stat in stats: if stat in info: stats_str.append("{}: {}".format(stat, info[stat])) return stats_str def get_opponent_info(self, game): name = game.opponent.name if name: rating = game.opponent.rating if game.opponent.rating is not None else "none" title = game.opponent.title if game.opponent.title else "none" player_type = "computer" if title == "BOT" else "human" def report_game_result(self, game, board): self.engine.protocol._position(board)
1.703125
2
ROS_packages/custom_ROS_envs/turtlebot2_maze_env/src/turtlebot2_maze_random.py
PierreExeter/custom_gym_envs
1
1864
#!/usr/bin/env python
import gym
import rospy
from openai_ros.openai_ros_common import StartOpenAI_ROS_Environment

# initialise environment
rospy.init_node('turtlebot2_maze_random', anonymous=True, log_level=rospy.WARN)
task_and_robot_environment_name = rospy.get_param('/turtlebot2/task_and_robot_environment_name')
env = StartOpenAI_ROS_Environment(task_and_robot_environment_name)

print("Environment: ", env)
print("Action space: ", env.action_space)
# print(env.action_space.high)
# print(env.action_space.low)
print("Observation space: ", env.observation_space)
print(env.observation_space.high)
print(env.observation_space.low)

for episode in range(20):
    env.reset()
    for t in range(100):
        action = env.action_space.sample()
        obs, reward, done, info = env.step(action)
        print("episode: ", episode)
        print("timestep: ", t)
        print("obs: ", obs)
        print("action:", action)
        print("reward: ", reward)
        print("done: ", done)
        print("info: ", info)
        if done:
            print("Episode {} finished after {} timesteps".format(episode, t+1))
            break

env.close()
1.96875
2
authentication/migrate.py
anae09/electionWebService
0
1880
from flask import Flask
from configuration import Configuration
from flask_migrate import Migrate, init, migrate, upgrade
from models import database, Role, UserRole, User
from sqlalchemy_utils import database_exists, create_database

application = Flask(__name__)
application.config.from_object(Configuration)

migrateObject = Migrate(application, database)

done = False
while not done:
    try:
        if not database_exists(application.config["SQLALCHEMY_DATABASE_URI"]):
            create_database(application.config["SQLALCHEMY_DATABASE_URI"])

        database.init_app(application)

        with application.app_context() as context:
            init()
            migrate(message="Production migration")
            upgrade()

            adminRole = Role(name="administrator")
            userRole = Role(name="user")

            database.session.add(adminRole)
            database.session.add(userRole)
            database.session.commit()

            admin = User(
                jmbg="0000000000000",
                forename="admin",
                surname="admin",
                email="<EMAIL>",
                password="1"
            )

            database.session.add(admin)
            database.session.commit()

            userRole = UserRole(
                userId=admin.id,
                roleId=adminRole.id
            )

            database.session.add(userRole)
            database.session.commit()

        done = True
    except Exception as err:
        print(err)
1.304688
1
run_mod.py
fpl-analytics/gr_crypto
0
1912
""" Setup: - Import Libraries - Setup tf on multiple cores - Import Data """ import pandas as pd import numpy as np import tensorflow as tf import seaborn as sns from time import time import multiprocessing import random import os from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense, LSTM, ConvLSTM2D, Flatten from sklearn.preprocessing import StandardScaler from sklearn.linear_model import LinearRegression from sklearn.ensemble import RandomForestRegressor from joblib import dump, load from mod.prep import log_return, log_return_np, preprocess from mod.model import return_pred from mod.eval import evaluate_regression, evaluate_up_down cores = multiprocessing.cpu_count() tf.config.threading.set_inter_op_parallelism_threads(cores-1) root_folder = "data" wide_close = pd.read_csv(root_folder + "/working/wide_close.csv") wide_target = pd.read_csv(root_folder + "/working/wide_target.csv") asset_details = pd.read_csv(root_folder + "/asset_details.csv") assets = [str(i) for i in asset_details["Asset_ID"]] """ Preprocess """ close_returns = wide_close[assets].apply(log_return) close_returns["time"] = wide_close["time"] close_returns[assets] = close_returns[assets].replace([np.inf,-np.inf],np.nan) """ Linear Regression """ x_steps, y_steps = 60, [1, 15] col_in, col_out = "1", "1" train_x, test_x, train_y, test_y, time_d = preprocess(data_in = wide_close, col_in, col_out, time_col="time", x_steps, y_steps) # 1 step lr_1 = LinearRegression() lr_1.fit(train_x.reshape(-1, x_steps), train_y[:,0,:].reshape(-1, 1)) true, pred = return_pred(test_x, test_y[:,0,:], lr_1) evaluate_regression(true, pred) evaluate_up_down(true, pred) # 15 step lr_15 = LinearRegression() lr_15.fit(train_x.reshape(-1, x_steps), train_y[:,1,:].reshape(-1, 1)) true, pred = return_pred(test_x, test_y[:,1,:], lr_1) evaluate_regression(true, pred) evaluate_up_down(true, pred) """ calculate and store components seperately process: - first, get rolling values for each timestamp - then, predict 1 and 15 gaps and store in array """ # Production """ Steps: - Get train, val test and test indices. Importantly, this needs to cover all assets (even though not all assets exist) for the whole time period. 
- Build models """ assets = list(asset_details["Asset_ID"].astype(str)) # Get indexes i = np.select( [ (wide_close.index >= 0) & (wide_close.index <= (len(wide_close)*0.7)), (wide_close.index > (len(wide_close)*0.7)) & (wide_close.index <= (len(wide_close)*0.8)) ], ["train", "val"], default = "test") indexes = pd.DataFrame({"time":wide_close["time"], "set":i}) for a in assets: print("asset", a) filt = indexes["set"][~pd.isna(wide_close[a])] counts = filt.value_counts() df = pd.DataFrame({"counts":counts, "pct":counts/np.sum(counts)}) print(df, "\n\n") indexes_d = {} for s in indexes["set"].unique(): indexes_d[s] = indexes["time"][indexes["set"] == s] mkdir "model_files" mkdir "model_files/linear_regression" for a in assets: print("Asset", a) x_steps, y_steps = 60, [1, 16] cols_in, cols_out = a, a train_x, test_x, train_y, test_y, time_d = preprocess(wide_close, cols_in, cols_out, "time", x_steps, y_steps) # 1 step lr_1 = LinearRegression() lr_1.fit(train_x.reshape(-1, x_steps), train_y[:,0,:].reshape(-1, 1)) true, pred = return_pred(test_x, test_y[:,0,:], lr_1) print("Model 1 Metrics") evaluate_regression(true, pred) evaluate_up_down(true, pred) # 16 step lr_16 = LinearRegression() lr_16.fit(train_x.reshape(-1, x_steps), train_y[:,1,:].reshape(-1, 1)) true, pred = return_pred(test_x, test_y[:,1,:], lr_16) print("Model 16 Metrics") evaluate_regression(true, pred) evaluate_up_down(true, pred) dump(lr_1, f"model_files/linear_regression/lr_{a}_1") dump(lr_16, f"model_files/linear_regression/lr_{a}_16") dump(time_d, "model_files/linear_regression/lr_times") """ Random Forest """ rf = RandomForestRegressor(n_jobs=-1) # start = time.time() rf.fit(train_x.reshape(-1, x_steps), train_y.reshape(-1)) # print("Took:", round(start-time.time()))
2.09375
2
homeassistant/components/eight_sleep/binary_sensor.py
liangleslie/core
2
1920
"""Support for Eight Sleep binary sensors.""" from __future__ import annotations import logging from pyeight.eight import EightSleep from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import EightSleepBaseEntity from .const import DATA_API, DATA_HEAT, DOMAIN _LOGGER = logging.getLogger(__name__) async def async_setup_platform( hass: HomeAssistant, config: ConfigType, async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the eight sleep binary sensor.""" if discovery_info is None: return eight: EightSleep = hass.data[DOMAIN][DATA_API] heat_coordinator: DataUpdateCoordinator = hass.data[DOMAIN][DATA_HEAT] entities = [] for user in eight.users.values(): entities.append( EightHeatSensor(heat_coordinator, eight, user.userid, "bed_presence") ) async_add_entities(entities) class EightHeatSensor(EightSleepBaseEntity, BinarySensorEntity): """Representation of a Eight Sleep heat-based sensor.""" _attr_device_class = BinarySensorDeviceClass.OCCUPANCY def __init__( self, coordinator: DataUpdateCoordinator, eight: EightSleep, user_id: str | None, sensor: str, ) -> None: """Initialize the sensor.""" super().__init__(coordinator, eight, user_id, sensor) assert self._user_obj _LOGGER.debug( "Presence Sensor: %s, Side: %s, User: %s", sensor, self._user_obj.side, user_id, ) @property def is_on(self) -> bool: """Return true if the binary sensor is on.""" assert self._user_obj return bool(self._user_obj.bed_presence)
1.703125
2
test.py
t-kaichi/hyperspoof
10
1944
import os

from absl import app
from absl import flags
import numpy as np
import tqdm
from tensorflow.keras import Model
from albumentations import (
    Compose, HorizontalFlip, RandomBrightness, RandomContrast,
    ShiftScaleRotate, ToFloat, VerticalFlip)

from utils import reset_tf
from eval_utils import calc_score_variance
from models import build_seg_model, build_pixel_mlp_class_model
from VegetableSequence import VegetableDataset, VegetableSequence
from temporal_random_seed import TemporalRandomSeed
import myFlags

FLAGS = flags.FLAGS


def main(argv):
    reset_tf(FLAGS.device)
    ds_info = VegetableDataset(FLAGS.data_path)
    dim = ds_info.hsi_dims
    cats = ds_info.get_categories()

    # spoof file path
    assert FLAGS.spoof_type == "print" or FLAGS.spoof_type == "replay"
    spooffn = "224_224.m.rf.npy"
    spoofdir = '03' if FLAGS.spoof_type == 'print' else '04'  # "04": replay
    spooffns = [os.path.join(ds_info.DATASET_ROOT_PATH, str(i).zfill(2),
                             "05", spoofdir, spooffn) for i in cats]

    # dataset generation
    input_shape = (224, 224, dim)
    AUGMENTATIONS_ALL = Compose([
        HorizontalFlip(p=0.5),
        VerticalFlip(p=0.2),
        RandomContrast(limit=0.001, p=0.5),
        RandomBrightness(limit=0.001, p=0.5),
        ShiftScaleRotate(
            shift_limit=0.3, scale_limit=0.9,
            rotate_limit=30, border_mode=4, p=0.8),  # cv2.BORDER_REFLECT_101
        ToFloat(max_value=1024)
    ])
    AUGMENTATIONS_SIMPLE = Compose([
        ToFloat(max_value=1024)
    ])

    test_aug_gen = VegetableSequence(dataset=ds_info, instance_ids=[5],
                                     sample_ids=[1, 2], random_state=2,
                                     batch_size=32,
                                     augmentations=AUGMENTATIONS_ALL, isTest=True)

    # build and load models
    print("building model")
    nb_classes = ds_info.object_categories
    seg_model = build_seg_model(input_shape=input_shape)
    seg_model.load_weights(FLAGS.seg_model)
    pix_class_model = build_pixel_mlp_class_model(
        nb_classes=nb_classes, input_shape=(1, dim))
    pix_class_model.load_weights(FLAGS.class_model)
    penultimate_feat_extractor = Model(
        inputs=pix_class_model.input,
        outputs=pix_class_model.get_layer("penultimate").output)

    def predict_pixel_merge(xs):
        _xs_seg = np.argmax(seg_model.predict(xs), axis=-1)
        assert len(_xs_seg) == len(xs)
        _var_fs = []  # variance of the penultimate features
        for i in range(len(xs)):
            _x = xs[i]
            _x_seg = _xs_seg[i]
            _x_pixels = _x[_x_seg > 0]
            _x_pixels = _x_pixels[:, np.newaxis, :]
            _f_pixels = penultimate_feat_extractor.predict(
                _x_pixels, batch_size=224*224*dim).reshape(-1, FLAGS.penultimate_nodes)
            _var_f = np.sum(np.var(_f_pixels, axis=0))
            _var_fs.append(_var_f)
        return _var_fs

    predict_func = predict_pixel_merge

    var_fs = []
    true_labels = []

    # process live images
    for i in tqdm.trange(FLAGS.live_augs, desc="live augumentations"):
        for batch in tqdm.tqdm(test_aug_gen, desc="live augumentations batch"):
            xs, ys = batch
            var_f = predict_func(xs)
            var_fs.extend(var_f)
            true_labels.extend(np.argmax(ys, axis=1))

    # process spoof images
    with TemporalRandomSeed(2021):
        for fn in tqdm.tqdm(spooffns, desc="spoofs"):
            x = np.load(fn).astype("uint16")
            xs_aug = np.array([AUGMENTATIONS_ALL(image=x)["image"]
                               for i in range(FLAGS.spoof_augs)])
            var_f = predict_func(xs_aug)
            var_fs.extend(var_f)
            true_labels.extend([10000] * FLAGS.spoof_augs)  # spoof label: 10000

    # calculate accuracy
    true_labels = np.array(true_labels)
    var_fs = np.array(var_fs)
    bin_labels, uncertainties, results = calc_score_variance(true_labels, var_fs)

    # save results
    expr_name = parentdirname(FLAGS.class_model)
    save_result_cache(expr_name, bin_labels, uncertainties, results)

    return 0


def save_result_cache(expr_name, labels, uncertainties, results):
    dn = os.path.join(FLAGS.out_path, expr_name)
    os.makedirs(dn, exist_ok=True)
    np.save(os.path.join(dn, "binary_labels.npy"), labels)
    np.save(os.path.join(dn, "uncertainties.npy"), uncertainties)
    with open(os.path.join(dn, "results.txt"), "w") as f:
        for i, result in enumerate(["TNR95: ", "Detection acc.: ", "ROC: "]):
            f.write(result + str(results[i]) + "\n")
    print("saved to " + dn)


def parentdirname(path):
    return os.path.basename(os.path.dirname(path))


if __name__ == "__main__":
    app.run(main)
1.773438
2
homeassistant/components/fritz/sensor.py
EuleMitKeule/core
3
1952
"""AVM FRITZ!Box binary sensors.""" from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from datetime import datetime, timedelta import logging from typing import Any, Literal from fritzconnection.core.exceptions import ( FritzActionError, FritzActionFailedError, FritzConnectionException, FritzInternalError, FritzServiceError, ) from fritzconnection.lib.fritzstatus import FritzStatus from homeassistant.components.sensor import ( STATE_CLASS_MEASUREMENT, STATE_CLASS_TOTAL_INCREASING, SensorEntity, SensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( DATA_GIGABYTES, DATA_RATE_KILOBITS_PER_SECOND, DATA_RATE_KILOBYTES_PER_SECOND, DEVICE_CLASS_TIMESTAMP, ENTITY_CATEGORY_DIAGNOSTIC, SIGNAL_STRENGTH_DECIBELS, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utcnow from .common import FritzBoxBaseEntity, FritzBoxTools from .const import DOMAIN, DSL_CONNECTION, UPTIME_DEVIATION _LOGGER = logging.getLogger(__name__) def _uptime_calculation(seconds_uptime: float, last_value: datetime | None) -> datetime: """Calculate uptime with deviation.""" delta_uptime = utcnow() - timedelta(seconds=seconds_uptime) if ( not last_value or abs((delta_uptime - last_value).total_seconds()) > UPTIME_DEVIATION ): return delta_uptime return last_value def _retrieve_device_uptime_state( status: FritzStatus, last_value: datetime ) -> datetime: """Return uptime from device.""" return _uptime_calculation(status.device_uptime, last_value) def _retrieve_connection_uptime_state( status: FritzStatus, last_value: datetime | None ) -> datetime: """Return uptime from connection.""" return _uptime_calculation(status.connection_uptime, last_value) def _retrieve_external_ip_state(status: FritzStatus, last_value: str) -> str: """Return external ip from device.""" return status.external_ip # type: ignore[no-any-return] def _retrieve_kb_s_sent_state(status: FritzStatus, last_value: str) -> float: """Return upload transmission rate.""" return round(status.transmission_rate[0] / 1000, 1) # type: ignore[no-any-return] def _retrieve_kb_s_received_state(status: FritzStatus, last_value: str) -> float: """Return download transmission rate.""" return round(status.transmission_rate[1] / 1000, 1) # type: ignore[no-any-return] def _retrieve_max_kb_s_sent_state(status: FritzStatus, last_value: str) -> float: """Return upload max transmission rate.""" return round(status.max_bit_rate[0] / 1000, 1) # type: ignore[no-any-return] def _retrieve_max_kb_s_received_state(status: FritzStatus, last_value: str) -> float: """Return download max transmission rate.""" return round(status.max_bit_rate[1] / 1000, 1) # type: ignore[no-any-return] def _retrieve_gb_sent_state(status: FritzStatus, last_value: str) -> float: """Return upload total data.""" return round(status.bytes_sent / 1000 / 1000 / 1000, 1) # type: ignore[no-any-return] def _retrieve_gb_received_state(status: FritzStatus, last_value: str) -> float: """Return download total data.""" return round(status.bytes_received / 1000 / 1000 / 1000, 1) # type: ignore[no-any-return] def _retrieve_link_kb_s_sent_state(status: FritzStatus, last_value: str) -> float: """Return upload link rate.""" return round(status.max_linked_bit_rate[0] / 1000, 1) # type: ignore[no-any-return] def _retrieve_link_kb_s_received_state(status: FritzStatus, last_value: str) -> float: """Return download link rate.""" 
return round(status.max_linked_bit_rate[1] / 1000, 1) # type: ignore[no-any-return] def _retrieve_link_noise_margin_sent_state( status: FritzStatus, last_value: str ) -> float: """Return upload noise margin.""" return status.noise_margin[0] / 10 # type: ignore[no-any-return] def _retrieve_link_noise_margin_received_state( status: FritzStatus, last_value: str ) -> float: """Return download noise margin.""" return status.noise_margin[1] / 10 # type: ignore[no-any-return] def _retrieve_link_attenuation_sent_state( status: FritzStatus, last_value: str ) -> float: """Return upload line attenuation.""" return status.attenuation[0] / 10 # type: ignore[no-any-return] def _retrieve_link_attenuation_received_state( status: FritzStatus, last_value: str ) -> float: """Return download line attenuation.""" return status.attenuation[1] / 10 # type: ignore[no-any-return] @dataclass class FritzRequireKeysMixin: """Fritz sensor data class.""" value_fn: Callable[[FritzStatus, Any], Any] @dataclass class FritzSensorEntityDescription(SensorEntityDescription, FritzRequireKeysMixin): """Describes Fritz sensor entity.""" connection_type: Literal["dsl"] | None = None SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( FritzSensorEntityDescription( key="external_ip", name="External IP", icon="mdi:earth", value_fn=_retrieve_external_ip_state, ), FritzSensorEntityDescription( key="device_uptime", name="Device Uptime", device_class=DEVICE_CLASS_TIMESTAMP, entity_category=ENTITY_CATEGORY_DIAGNOSTIC, value_fn=_retrieve_device_uptime_state, ), FritzSensorEntityDescription( key="connection_uptime", name="Connection Uptime", device_class=DEVICE_CLASS_TIMESTAMP, entity_category=ENTITY_CATEGORY_DIAGNOSTIC, value_fn=_retrieve_connection_uptime_state, ), FritzSensorEntityDescription( key="kb_s_sent", name="Upload Throughput", state_class=STATE_CLASS_MEASUREMENT, native_unit_of_measurement=DATA_RATE_KILOBYTES_PER_SECOND, icon="mdi:upload", value_fn=_retrieve_kb_s_sent_state, ), FritzSensorEntityDescription( key="kb_s_received", name="Download Throughput", state_class=STATE_CLASS_MEASUREMENT, native_unit_of_measurement=DATA_RATE_KILOBYTES_PER_SECOND, icon="mdi:download", value_fn=_retrieve_kb_s_received_state, ), FritzSensorEntityDescription( key="max_kb_s_sent", name="Max Connection Upload Throughput", native_unit_of_measurement=DATA_RATE_KILOBITS_PER_SECOND, icon="mdi:upload", entity_category=ENTITY_CATEGORY_DIAGNOSTIC, value_fn=_retrieve_max_kb_s_sent_state, ), FritzSensorEntityDescription( key="max_kb_s_received", name="Max Connection Download Throughput", native_unit_of_measurement=DATA_RATE_KILOBITS_PER_SECOND, icon="mdi:download", entity_category=ENTITY_CATEGORY_DIAGNOSTIC, value_fn=_retrieve_max_kb_s_received_state, ), FritzSensorEntityDescription( key="gb_sent", name="GB sent", state_class=STATE_CLASS_TOTAL_INCREASING, native_unit_of_measurement=DATA_GIGABYTES, icon="mdi:upload", value_fn=_retrieve_gb_sent_state, ), FritzSensorEntityDescription( key="gb_received", name="GB received", state_class=STATE_CLASS_TOTAL_INCREASING, native_unit_of_measurement=DATA_GIGABYTES, icon="mdi:download", value_fn=_retrieve_gb_received_state, ), FritzSensorEntityDescription( key="link_kb_s_sent", name="Link Upload Throughput", native_unit_of_measurement=DATA_RATE_KILOBITS_PER_SECOND, icon="mdi:upload", value_fn=_retrieve_link_kb_s_sent_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_kb_s_received", name="Link Download Throughput", native_unit_of_measurement=DATA_RATE_KILOBITS_PER_SECOND, 
icon="mdi:download", value_fn=_retrieve_link_kb_s_received_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_noise_margin_sent", name="Link Upload Noise Margin", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, icon="mdi:upload", value_fn=_retrieve_link_noise_margin_sent_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_noise_margin_received", name="Link Download Noise Margin", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, icon="mdi:download", value_fn=_retrieve_link_noise_margin_received_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_attenuation_sent", name="Link Upload Power Attenuation", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, icon="mdi:upload", value_fn=_retrieve_link_attenuation_sent_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_attenuation_received", name="Link Download Power Attenuation", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, icon="mdi:download", value_fn=_retrieve_link_attenuation_received_state, connection_type=DSL_CONNECTION, ), ) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up entry.""" _LOGGER.debug("Setting up FRITZ!Box sensors") fritzbox_tools: FritzBoxTools = hass.data[DOMAIN][entry.entry_id] if ( not fritzbox_tools.connection or "WANIPConn1" not in fritzbox_tools.connection.services ): # Only routers are supported at the moment return dsl: bool = False try: dslinterface = await hass.async_add_executor_job( fritzbox_tools.connection.call_action, "WANDSLInterfaceConfig:1", "GetInfo", ) dsl = dslinterface["NewEnable"] except ( FritzInternalError, FritzActionError, FritzActionFailedError, FritzServiceError, ): pass entities = [ FritzBoxSensor(fritzbox_tools, entry.title, description) for description in SENSOR_TYPES if dsl or description.connection_type != DSL_CONNECTION ] async_add_entities(entities, True) class FritzBoxSensor(FritzBoxBaseEntity, SensorEntity): """Define FRITZ!Box connectivity class.""" entity_description: FritzSensorEntityDescription def __init__( self, fritzbox_tools: FritzBoxTools, device_friendly_name: str, description: FritzSensorEntityDescription, ) -> None: """Init FRITZ!Box connectivity class.""" self.entity_description = description self._last_device_value: str | None = None self._attr_available = True self._attr_name = f"{device_friendly_name} {description.name}" self._attr_unique_id = f"{fritzbox_tools.unique_id}-{description.key}" super().__init__(fritzbox_tools, device_friendly_name) def update(self) -> None: """Update data.""" _LOGGER.debug("Updating FRITZ!Box sensors") try: status: FritzStatus = self._fritzbox_tools.fritz_status self._attr_available = True except FritzConnectionException: _LOGGER.error("Error getting the state from the FRITZ!Box", exc_info=True) self._attr_available = False return self._attr_native_value = ( self._last_device_value ) = self.entity_description.value_fn(status, self._last_device_value)
1.742188
2
books/rakutenapi.py
NobukoYano/LibraryApp
1
1968
import json

import requests
from django.conf import settings


class rakuten:

    def get_json(self, isbn: str) -> dict:
        appid = settings.RAKUTEN_APP_ID

        # API request template
        api = "https://app.rakuten.co.jp/services/api/BooksTotal/"\
              "Search/20170404?format=json&isbnjan={isbnjan}&"\
              "applicationId={appid}"

        # format get api URL
        url = api.format(isbnjan=isbn, appid=appid)

        # execute
        r = requests.get(url)

        # Check the status code
        status_code = r.status_code
        if status_code != 200:
            # if failed
            return None

        # decode to json
        data = json.loads(r.text)
        if data['count'] == 0:
            return None

        json_data = {}
        json_data['isbn'] = data['Items'][0]['Item']['isbn']
        json_data['title'] = data['Items'][0]['Item']['title']
        json_data['publisher'] = data['Items'][0]['Item']['publisherName']
        json_data['pubdate'] = data['Items'][0]['Item']['salesDate']
        json_data['cover'] = data['Items'][0]['Item']['largeImageUrl']
        json_data['author'] = data['Items'][0]['Item']['author']

        return json_data
1.898438
2
utils/thin.py
BnF-jadis/projet
5
1984
# 2020, BackThen Maps # Coded by <NAME> https://github.com/RPetitpierre # For Bibliothèque nationale de France (BnF) import cv2, thinning, os import numpy as np import pandas as pd import shapefile as shp from skimage.measure import approximate_polygon from PIL import Image, ImageDraw from utils.utils import * from utils.match import toLatLon Image.MAX_IMAGE_PIXELS = 500000000 def skeletonize(road_network: np.ndarray, path: str = "workshop/vectorized.png", largest_component: bool = False): ''' Thinning/skeletonization of the road network image to a wired model. Input(s): road_network: black and white image of the road network (streets in white) path: path where the skeletonized image should be saved largest_component: if True, only the largest road network component will be kept Output(s): vectorized: skeletonized image ''' assert len(road_network.shape) == 2, 'ERROR: road_network must be grayscale image' img = cv2.resize(road_network, (road_network.shape[1]//2, road_network.shape[0]//2)) vectorized = thinning.guo_hall_thinning(img) vectorized[vectorized > 100] = 255 vectorized[vectorized <= 100] = 0 if largest_component: try: _, labels, stats, _ = cv2.connectedComponentsWithStats(vectorized.copy(), connectivity=8, stats=cv2.CC_STAT_AREA) stats = stats[1:] main_component = (np.argmax(stats[:,4])+1).astype('int32') vectorized = (labels == main_component).astype('uint8')*255 except: 'Warning: Skeletonization failed to apply largest_component = True param. Skipping.' cv2.imwrite(path, vectorized) return vectorized def findNodes(image: np.ndarray): ''' Find the nodes in the road network skeleton image. Input(s): image: skeletonized image Output(s): nodes: array of nodes coordinates (x, y) degree: degrees of the nodes (2=endpoint, 4=crossroads of 3 streets, 5=crossroads of 4 streets, etc.) 
addresses: directions of the crossing roads, with regard to the node ''' img = image.copy() # Find row and column locations that are non-zero (rows, cols) = np.nonzero(img) nodes, degree, addresses = [], [], [] for (r,c) in zip(rows, cols): if r > 0 and c > 0 and r < image.shape[0]-1 and c < image.shape[1]-1: # Extract an 8-connected neighbourhood (col_neigh, row_neigh) = np.meshgrid(np.array([c-1, c, c+1]), np.array([r-1, r, r+1])) # Cast to int to index into image col_neigh = col_neigh.astype('int') row_neigh = row_neigh.astype('int') # Convert into a single 1D array and check for non-zero locations pix_neighbourhood = img[row_neigh, col_neigh].ravel() != 0 # If the number of non-zero locations equals 2, add this to our list of coordinates n_neighbours = np.sum(pix_neighbourhood) if (n_neighbours == 2) or (n_neighbours >= 4): nodes.append((r, c)) degree.append(n_neighbours) direction_set = np.where(pix_neighbourhood == True)[0] direction_set = direction_set[direction_set != 4] addresses.append(direction_set) nodes = np.asarray(nodes) return nodes, degree, addresses def cleanNodesEdges(df_nodes: pd.DataFrame): df = df_nodes.copy() new_addresses, new_degree = [], [] for ind, address in df['address'].iteritems(): new_address = avoidDiagonalEdges(address) new_addresses.append(new_address) new_degree.append(len(new_address) + 1) df['address'] = new_addresses df['degree'] = new_degree return df def avoidDiagonalEdges(address: list, direction: int = None): right, diagonal = [1, 3, 5, 7], {0: [1, 3], 2: [1, 5], 6: [3, 7], 8: [5, 7]} new_address = [] for r in right: if r in address: new_address.append(r) for d in diagonal.keys(): if d in address: if not(diagonal[d][0] in address) and not(diagonal[d][1] in address): if direction != None: if not((8-direction) in diagonal[d]): new_address.append(d) else: new_address.append(d) return new_address def explorePath(start_x: int, start_y: int, start_dir: int, image: np.ndarray, nodes_grid: np.ndarray): ''' Follow the path from one given start node and direction until the next node, and stores the pixels on the way. Input(s): start_x: start node x-coordinate start_y: start node y-coordinate start_dir: starting direction ({0, 1, 2, 3, -, 5, 6, 7, 8}) image: skeletonized image of the road network nodes_grid: grid of the nodes of the skeletonized image Output(s): way: list of pixel coordinates on the way direction: last direction to reach the 2nd node nodes_grid[x, y]: degree of the arrival node ''' def absoluteWay(x: int, y: int, way: int): if way == 0: x_, y_ = x-1, y-1 elif way == 1: x_, y_ = x-1, y elif way == 2: x_, y_ = x-1, y+1 elif way == 3: x_, y_ = x, y-1 elif way == 5: x_, y_ = x, y+1 elif way == 6: x_, y_ = x+1, y-1 elif way == 7: x_, y_ = x+1, y elif way == 8: x_, y_ = x+1, y+1 else: raise AttributeError('Parameters invalid: (' + str(x) + ',' + str(y) + ',' + str(way) + '), way \ should be comprised between 0 and 8, and != 4. 
x, y and way should be of type int.') return x_, y_ def noTurnBack(direction: int): wrong_paths = [] if direction == 0: wrong_paths = [5, 7] elif direction == 1: wrong_paths = [6, 8] elif direction == 2: wrong_paths = [3, 7] elif direction == 3: wrong_paths = [2, 8] elif direction == 5: wrong_paths = [0, 6] elif direction == 6: wrong_paths = [1, 5] elif direction == 7: wrong_paths = [0, 2] elif direction == 8: wrong_paths = [1, 3] return wrong_paths direction = start_dir x, y = start_x, start_y assert image[x, y] != 0, 'ERROR: start point is not white' end = False way = [(x, y)] # First iteration new_x, new_y = absoluteWay(x, y, direction) assert image[new_x, new_y] != 0, 'ERROR: 2nd point is not white' way.append((new_x, new_y)) x, y = new_x, new_y wrong_paths = noTurnBack(direction) wrong_paths_active = True if nodes_grid[x, y]: end = True direction = 8-start_dir while not(end): if x > 0 and y > 0 and x < image.shape[0]-1 and y < image.shape[1]-1: # Extract an 8-connected neighbourhood (row_neigh, col_neigh) = np.meshgrid(np.array([x-1, x, x+1]), np.array([y-1, y, y+1])) # Cast to int to index into image col_neigh, row_neigh = col_neigh.astype('int'), row_neigh.astype('int') # Convert into a single 1D array and check for non-zero locations try: pix_neighbourhood = image[row_neigh, col_neigh].transpose().ravel() != 0 except: print(x, y, image.shape, ) raise AssertionError() # If the number of non-zero locations equals 2, add this to our list of coordinates n_neighbours = np.sum(pix_neighbourhood) direction_set = np.where(pix_neighbourhood == True)[0] last_ds = [wrong_paths] last_ds.append(direction_set) direction_set = direction_set[direction_set != 4] last_ds.append(direction_set) direction_set = direction_set[direction_set != (8-direction)] last_ds.append(direction_set) direction_set = np.asarray(avoidDiagonalEdges(direction_set, direction)) last_ds.append(direction_set) if wrong_paths_active: for wrong_path in wrong_paths: direction_set = direction_set[direction_set != wrong_path] wrong_paths_active = False if len(direction_set) != 1: end = True break direction = direction_set[0] new_x, new_y = absoluteWay(x, y, direction) way.append((new_x, new_y)) x, y = new_x, new_y if nodes_grid[x, y]: end = True else: end = True return way, direction, nodes_grid[x, y] def findSegments(df_nodes: pd.DataFrame, image: np.ndarray, min_length: int = 30, return_simple_ways: bool = True): ''' Find all the road segments in the network. Keep the ones that are longer than a given length or non-terminal. Optionally, compute the Douglas-Peucker simple itinerary of each segment and return it. 
Input(s): df_nodes: list of nodes image: skeletonized image of the road network min_length: min segment length if the segment is terminal return_simple_ways: if True, compute the Douglas-Peucker simple itinerary of each segment and return it Output(s): (Optional)(simple_ways: the Douglas-Peucker simple itinerary of each segmenty) ways: list of segments, containing all the pixels on the way between each couple of nodes nodes_grid: image containing all the nodes found in the image and their degree ''' img = image.copy() done, ways = [], [] df_nodes = df_nodes.sort_values(by='degree').reset_index(drop=True) nodes_grid = np.zeros(image.shape) for ind, row in df_nodes[['x', 'y', 'degree']].iterrows(): nodes_grid[row['x'], row['y']] = row['degree'] nodes_grid = nodes_grid.astype('int') for ind, node in df_nodes.iterrows(): for direct in node['address']: code = str(node['x']) + '_' + str(node['y']) + '_' + str(direct) if not(code in done): way, last_direct, degree = explorePath(start_x=node['x'], start_y=node['y'], start_dir=direct, image=img, nodes_grid=nodes_grid) if not((len(way) <= min_length) and ((node['degree'] == 2) or (degree == 2))): done.append(str(way[-1][0]) + '_' + str(way[-1][1]) + '_' + str(8-last_direct)) ways.append(way) if return_simple_ways: simple_ways = [] for way in ways: inv_way = np.asarray([np.asarray(way)[:,1], image.shape[0]-np.asarray(way)[:,0]]).transpose() simple_ways.append(approximate_polygon(np.asarray(inv_way), tolerance=1.6).tolist()) return simple_ways, ways, nodes_grid else: return ways, nodes_grid def thinImage(image: np.ndarray, image_name: str, export_file_path: str, exportPNG: bool = False, exportJSON: bool = False, exportSVG: bool = False, exportSHP: bool = False, geoloc: bool = False): assert (exportPNG or exportJSON or exportSVG or exportSHP) # Convert to B&W road_network = image.copy() road_network[road_network < 254] = 0 road_network[road_network < 255/2] = 0 road_network[road_network >= 255/2] = 255 vectorized = skeletonize(road_network, largest_component = True) nodes, degree, addresses = findNodes(vectorized) if len(degree) < 0: return [], [], np.zeros((image.shape[1], image.shape[0])) df_nodes = pd.DataFrame({'x': nodes[:,0], 'y': nodes[:,1], 'degree': degree, 'address': addresses }) df_nodes = df_nodes.sort_values(by='degree').reset_index(drop=True) df_nodes = cleanNodesEdges(df_nodes) df_nodes = df_nodes[df_nodes['degree'] != 3] if (exportJSON or exportSHP): simple_segments, full_segments, nodes_grid = findSegments(df_nodes, vectorized, min_length = 15, return_simple_ways = True) else: full_segments, nodes_grid = findSegments(df_nodes, vectorized, min_length = 15, return_simple_ways = False) simple_segments = [] if exportPNG: toPNG(full_segments, vectorized, export_file_path) elif exportSVG: toPNG(full_segments, vectorized, os.path.join('workshop', 'thin.png')) if geoloc: if exportJSON: project_name = getProjectName() try: with open(os.path.join('save', project_name, 'match' , 'primary', image_name + '.json')) as data: data = json.load(data) M = np.asarray(data['M']) simple_segments_JSON = [] for segment in simple_segments: s = np.asarray([2*np.asarray(segment)[:,0], image.shape[0]-(2*np.asarray(segment)[:,1])]).T simple_segments_JSON.append(toLatLon((s@M[:, :2]) + M[:, 2:3].transpose()).tolist()) except: print("La géolocalisation de l'image {} n'a pas encore été calculée. 
Par conséquent, \ il n'est pas possible de calculer la géolocalisation de son réseau filaire".format(image_name)) simple_segments_JSON = simple_segments else: print('La géolocalisation du réseau filaire ne fonctionne que pour le format JSON actuellement.') else: simple_segments_JSON = simple_segments if exportJSON: with open(export_file_path.replace('png', 'json'), 'w') as outfile: json.dump(simple_segments_JSON, outfile) if exportSHP: os.makedirs(export_file_path.replace('.png', ''), exist_ok=True) toShapefile(simple_segments, os.path.join(export_file_path.replace('.png', ''), image_name)) if exportSVG: print("\nAvertissement: Si vous n'avez jamais utilisé cette commande, \ installez d'abord Homebrew, ImageMagick et Potrace via le terminal.\n") print('Pour installer Homebrew:\n', ' /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"') print('Pour installer ImageMagick:\n', ' brew install imagemagick') print('Pour installer Potrace: \n', ' brew install potrace\n') if exportPNG: png_path = export_file_path else: png_path = os.path.join('workshop', 'thin.png') pnm_path = os.path.join('workshop', 'thin.pnm') svg_path = export_file_path.replace('png', 'svg') os.system('convert ' + png_path + pnm_path) os.system('potrace ' + pnm_path + ' -s -o ' + svg_path) return simple_segments, full_segments, nodes_grid def toPNG(segments: list, vectorized: np.ndarray, out_path: str): ''' Save a given set of segments as a bitmap image from the road network. Input(s): segments: list of segments, containing all the pixels on the way between each couple of nodes vectorized: skeletonized image of the road network out_path: the path, where the output bitmap image should be save ''' canvas = (np.ones(vectorized.shape)*255).astype('uint8') cv2.imwrite('workshop/canvas.png', canvas); bitmap = Image.open('workshop/canvas.png') draw = ImageDraw.Draw(bitmap) for segment in segments: coords = [] for point in segment: coords.append((point[1], point[0])) draw.line(coords, fill = 'black', width=0) bitmap.save(out_path) def toShapefile(simple_ways, out_path): w = shp.Writer(out_path) w.field('DeletionFlag', 'C', 1, 0) w.field('gid', 'N', 11, 0) w.field('streetname', 'C', 41, 0) w.field('note', 'C', 32, 0) for i in range(len(simple_ways)): w.line([simple_ways[i]]) w.record('01', i, '', '') w.close()
2.75
3
week1/85-maximal-rectangle.py
LionTao/algo_weekend
0
1992
""" leetcode-85 给定一个仅包含 0 和 1 , 大小为 rows x cols 的二维二进制矩阵, 找出只包含 1 的最大矩形, 并返回其面积。 """ from typing import List class Solution: def maximalRectangle(self, matrix: List[List[str]]) -> int: """ 统计直方图然后单调递增栈 """ rows = len(matrix) if rows == 0: return 0 columns = len(matrix[0]) res = 0 heights = [0]*columns for r in range(rows): for c in range(columns): if matrix[r][c]=="1": heights[c]+=1 else: heights[c]=0 res = max(res,self.largestRectangleArea(heights)) def largestRectangleArea(self, heights: List[int]) -> int: #单调递增栈 heights = [-1] + heights + [-1] res = 0 ascend_stack = [] for i in range(len(heights)): while ascend_stack and heights[ascend_stack[-1]] > heights[i]: window_L_height_min_height = heights[ascend_stack.pop(-1)] window_L = ascend_stack[-1] + 1 window_R = i - 1 cur_area = window_L_height_min_height * (window_R - window_L + 1) res = max(res, cur_area) ascend_stack.append(i) return res
2.765625
3
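For reference, the standard LeetCode 85 example run through the Solution class above (illustrative usage only, not part of the dataset row); the expected area is 6:

matrix = [["1", "0", "1", "0", "0"],
          ["1", "0", "1", "1", "1"],
          ["1", "1", "1", "1", "1"],
          ["1", "0", "0", "1", "0"]]
print(Solution().maximalRectangle(matrix))  # 6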
cacheable/adapter/PeeweeAdapter.py
d1hotpep/cacheable
0
2008
import peewee
import playhouse.kv
from time import time

from . import CacheableAdapter


class PeeweeAdapter(CacheableAdapter, peewee.Model):

    key = peewee.CharField(max_length=256, unique=True)
    value = playhouse.kv.JSONField()
    mtime = peewee.IntegerField(default=time)
    ttl = peewee.IntegerField(default=0)

    class Meta:
        database = peewee.Proxy()

    def __init__(self, db_connection, table_name=None):
        if table_name:
            self._meta.db_table = table_name
        self._meta.database.initialize(db_connection)

    def multiget(self, keys):
        cls = self.__class__
        res = self.select(cls.key, cls.value) \
                  .where(cls.key << keys & self.__ttl_filter()) \
                  .tuples()

        return {x[0]: x[1] for x in res}

    @classmethod
    def multiset(cls, data, ttl=None):
        ts = int(time())
        ttl = ttl or 0

        kvs = []
        for key, value in data.items():
            kvs.append({
                cls.key: key,
                cls.value: value,
                cls.mtime: ts,
                cls.ttl: ttl,
            })

        cls.insert_many(kvs).upsert().execute()

    def delete(self, key_or_keys):
        if list == type(key_or_keys):
            keys = key_or_keys
        else:
            keys = [key_or_keys]

        cls = self.__class__
        peewee.DeleteQuery(cls).where(cls.key << keys).execute()

    def list(self, prefix=None, limit=None):
        cls = self.__class__

        q = self.select(cls.key, cls.value)

        if prefix:
            if self.__db_type() == peewee.SqliteDatabase:
                wildcard = '*'
            else:
                wildcard = '%'
            q = q.where(cls.key % ('%s%s' % (prefix, wildcard)))

        q = q.where(self.__ttl_filter())

        if limit:
            q = q.limit(limit)

        res = {x[0]: x[1] for x in q.tuples()}

        if prefix:
            res = {k[len(prefix):]: v for k, v in res.items()}

        return res

    def __ttl_filter(self):
        """
        Add the TTL where clause to a query, to filter out stale results
        """
        ts = int(time())
        cls = self.__class__
        # no TTL set, or the entry has not yet expired; the comparison must be
        # parenthesised because `|` binds tighter than `==` in Python
        return (cls.ttl == 0) | (cls.mtime + cls.ttl > ts)

    def __db_type(self):
        return type(self._meta.database.obj)
1.742188
2
indico/core/signals/event/core.py
tobiashuste/indico
0
2024
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.

from indico.core.signals.event import _signals


sidemenu = _signals.signal('sidemenu', """
Expected to return ``MenuEntryData`` objects to be added to the event side menu.
A single entry can be returned directly, multiple entries must be yielded.
""")

deleted = _signals.signal('deleted', """
Called when an event is deleted. The *sender* is the event object.
The `user` kwarg contains the user performing the deletion if available.
""")

updated = _signals.signal('updated', """
Called when basic data of an event is updated. The *sender* is the event.
A dict of changes is passed in the `changes` kwarg, with ``(old, new)``
tuples for each change. Note that the `person_links` change may happen
with `old` and `new` being the same lists for technical reasons. If the
key is present, it should be assumed that something changed (usually
the order or some data on the person link).
""")

cloned = _signals.signal('cloned', """
Called when an event is cloned. The *sender* is the `Event` object of
the old event, the new event is passed in the `new_event` kwarg.
""")

type_changed = _signals.signal('type-changed', """
Called when the type of an event is changed. The `sender` is the event,
the old type is passed in the `old_type` kwarg.
""")

moved = _signals.signal('moved', """
Called when an event is moved to a different category. The `sender` is the event,
the old category is in the `old_parent` kwarg.
""")

created = _signals.signal('created', """
Called when a new event is created. The `sender` is the new Event.
""")

session_updated = _signals.signal('session-updated', """
Called when a session is updated. The *sender* is the session.
""")

session_deleted = _signals.signal('session-deleted', """
Called when a session is deleted. The *sender* is the session.
""")

session_block_deleted = _signals.signal('session-block-deleted', """
Called when a session block is deleted. The *sender* is the session block.
This signal is called before the ``db.session.delete()`` on the block is
executed.
""")

timetable_buttons = _signals.signal('timetable-buttons', """
Expected to return a list of tuples ('button_name', 'js-call-class').
Called when building the timetable view.
""")

get_log_renderers = _signals.signal('get-log-renderers', """
Expected to return `EventLogRenderer` classes.
""")

get_feature_definitions = _signals.signal('get-feature-definitions', """
Expected to return `EventFeature` subclasses.
""")

metadata_postprocess = _signals.signal('metadata-postprocess', """
Called right after a dict-like representation of an event is created,
so that plugins can add their own fields. The *sender* is a string
parameter specifying the source of the metadata. The *event* kwarg
contains the event object. The metadata is passed in the `data` kwarg.
The signal should return a dict that will be used to update the
original representation (fields to add or override).
""")
1.070313
1
surpyval/parametric/expo_weibull.py
dfm/SurPyval
0
2056
import autograd.numpy as np from scipy.stats import uniform from autograd import jacobian from numpy import euler_gamma from scipy.special import gamma as gamma_func from scipy.special import ndtri as z from scipy import integrate from scipy.optimize import minimize from surpyval import parametric as para from surpyval import nonparametric as nonp from surpyval.parametric.parametric_fitter import ParametricFitter from .fitters.mpp import mpp class ExpoWeibull_(ParametricFitter): def __init__(self, name): self.name = name self.k = 3 self.bounds = ((0, None), (0, None), (0, None),) self.support = (0, np.inf) self.plot_x_scale = 'log' self.y_ticks = [0.0001, 0.0002, 0.0003, 0.001, 0.002, 0.003, 0.005, 0.01, 0.02, 0.03, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.99, 0.999, 0.9999] self.param_names = ['alpha', 'beta', 'mu'] self.param_map = { 'alpha' : 0, 'beta' : 1, 'mu' : 2 } def _parameter_initialiser(self, x, c=None, n=None, offset=False): log_x = np.log(x) log_x[np.isnan(log_x)] = 0 gumb = para.Gumbel.fit(log_x, c, n, how='MLE') if not gumb.res.success: gumb = para.Gumbel.fit(log_x, c, n, how='MPP') mu, sigma = gumb.params alpha, beta = np.exp(mu), 1. / sigma if (np.isinf(alpha) | np.isnan(alpha)): alpha = np.median(x) if (np.isinf(beta) | np.isnan(beta)): beta = 1. if offset: gamma = np.min(x) - (np.max(x) - np.min(x))/10. return gamma, alpha, beta, 1. else: return alpha, beta, 1. def sf(self, x, alpha, beta, mu): r""" Survival (or reliability) function for the ExpoWeibull Distribution: .. math:: R(x) = 1 - \left [ 1 - e^{-\left ( \frac{x}{\alpha} \right )^\beta} \right ]^{\mu} Parameters ---------- x : numpy array or scalar The values at which the function will be calculated alpha : numpy array or scalar scale parameter for the ExpoWeibull distribution beta : numpy array or scalar shape parameter for the ExpoWeibull distribution mu : numpy array or scalar shape parameter for the ExpoWeibull distribution Returns ------- sf : scalar or numpy array The value(s) of the reliability function at x. Examples -------- >>> import numpy as np >>> from surpyval import ExpoWeibull >>> x = np.array([1, 2, 3, 4, 5]) >>> ExpoWeibull.sf(x, 3, 4, 1.2) array([9.94911330e-01, 8.72902497e-01, 4.23286791e-01, 5.06674866e-02, 5.34717283e-04]) """ return 1 - np.power(1 - np.exp(-(x / alpha)**beta), mu) def ff(self, x, alpha, beta, mu): r""" Failure (CDF or unreliability) function for the ExpoWeibull Distribution: .. math:: F(x) = \left [ 1 - e^{-\left ( \frac{x}{\alpha} \right )^\beta} \right ]^{\mu} Parameters ---------- x : numpy array or scalar The values at which the function will be calculated alpha : numpy array or scalar scale parameter for the ExpoWeibull distribution beta : numpy array or scalar shape parameter for the ExpoWeibull distribution mu : numpy array or scalar shape parameter for the ExpoWeibull distribution Returns ------- sf : scalar or numpy array The value(s) of the failure function at x. Examples -------- >>> import numpy as np >>> from surpyval import ExpoWeibull >>> x = np.array([1, 2, 3, 4, 5]) >>> ExpoWeibull.ff(x, 3, 4, 1.2) array([0.00508867, 0.1270975 , 0.57671321, 0.94933251, 0.99946528]) """ return np.power(1 - np.exp(-(x / alpha)**beta), mu) def cs(self, x, X, alpha, beta, mu): r""" Conditional survival (or reliability) function for the ExpoWeibull Distribution: .. 
math:: R(x, X) = \frac{R(x + X)}{R(X)} Parameters ---------- x : numpy array or scalar The values at which the function will be calculated alpha : numpy array or scalar scale parameter for the ExpoWeibull distribution beta : numpy array or scalar shape parameter for the ExpoWeibull distribution mu : numpy array or scalar shape parameter for the ExpoWeibull distribution Returns ------- cs : scalar or numpy array The value(s) of the conditional reliability function at x. Examples -------- >>> import numpy as np >>> from surpyval import ExpoWeibull >>> x = np.array([1, 2, 3, 4, 5]) >>> ExpoWeibull.cs(x, 1, 3, 4, 1.2) array([8.77367129e-01, 4.25451775e-01, 5.09266354e-02, 5.37452200e-04, 1.35732908e-07]) """ return self.sf(x + X, alpha, beta, mu) / self.sf(X, alpha, beta, mu) def df(self, x, alpha, beta, mu): r""" Density function for the ExpoWeibull Distribution: .. math:: f(x) = \mu \left ( \frac{\beta}{\alpha} \right ) \left ( \frac{x}{\alpha} \right )^{\beta - 1} \left [ 1 - e^{-\left ( \frac{x}{\alpha} \right )^\beta} \right ]^{\mu - 1} e^{- \left ( \frac{x}{\alpha} \right )^\beta} Parameters ---------- x : numpy array or scalar The values at which the function will be calculated alpha : numpy array or scalar scale parameter for the ExpoWeibull distribution beta : numpy array or scalar shape parameter for the ExpoWeibull distribution mu : numpy array or scalar shape parameter for the ExpoWeibull distribution Returns ------- df : scalar or numpy array The value(s) of the density function at x. Examples -------- >>> import numpy as np >>> from surpyval import ExpoWeibull >>> x = np.array([1, 2, 3, 4, 5]) >>> ExpoWeibull.df(x, 3, 4, 1.2) array([0.02427515, 0.27589838, 0.53701385, 0.15943643, 0.00330058]) """ return (beta * mu * x**(beta - 1)) / (alpha**beta) \ * (1 - np.exp(-(x/alpha)**beta))**(mu - 1) \ * np.exp(-(x/alpha)**beta) def hf(self, x, alpha, beta, mu): r""" Instantaneous hazard rate for the ExpoWeibull Distribution: .. math:: h(x) = \frac{f(x)}{R(x)} Parameters ---------- x : numpy array or scalar The values at which the function will be calculated alpha : numpy array or scalar scale parameter for the ExpoWeibull distribution beta : numpy array or scalar shape parameter for the ExpoWeibull distribution mu : numpy array or scalar shape parameter for the ExpoWeibull distribution Returns ------- hf : scalar or numpy array The value(s) of the instantaneous hazard rate at x. Examples -------- >>> import numpy as np >>> from surpyval import ExpoWeibull >>> x = np.array([1, 2, 3, 4, 5]) >>> ExpoWeibull.hf(x, 3, 4, 1.2) array([0.02439931, 0.3160701 , 1.26867613, 3.14672068, 6.17256436]) """ return self.df(x, alpha, beta, mu) / self.sf(x, alpha, beta, mu) def Hf(self, x, alpha, beta, mu): r""" Cumulative hazard rate for the ExpoWeibull Distribution: .. math:: H(x) = -\ln \left ( R(x) \right ) Parameters ---------- x : numpy array or scalar The values at which the function will be calculated alpha : numpy array or scalar scale parameter for the ExpoWeibull distribution beta : numpy array or scalar shape parameter for the ExpoWeibull distribution mu : numpy array or scalar shape parameter for the ExpoWeibull distribution Returns ------- Hf : scalar or numpy array The value(s) of the cumulative hazard rate at x. 
Examples -------- >>> import numpy as np >>> from surpyval import ExpoWeibull >>> x = np.array([1, 2, 3, 4, 5]) >>> ExpoWeibull.Hf(x, 3, 4, 1.2) array([5.10166141e-03, 1.35931416e-01, 8.59705336e-01, 2.98247086e+00, 7.53377239e+00]) """ return -np.log(self.sf(x, alpha, beta, mu)) def qf(self, p, alpha, beta, mu): r""" Quantile function for the ExpoWeibull Distribution: .. math:: q(p) = \alpha \left [ -\ln \left ( 1 - p^{1/\mu} \right ) \right ]^{1/\beta} Parameters ---------- p : numpy array or scalar The percentiles at which the quantile will be calculated alpha : numpy array or scalar scale parameter for the ExpoWeibull distribution beta : numpy array or scalar shape parameter for the ExpoWeibull distribution mu : numpy array or scalar shape parameter for the ExpoWeibull distribution Returns ------- Q : scalar or numpy array The quantiles for the ExpoWeibull distribution at each value p Examples -------- >>> import numpy as np >>> from surpyval import ExpoWeibull >>> p = np.array([.1, .2, .3, .4, .5]) >>> ExpoWeibull.qf(p, 3, 4, 1.2) array([1.89361341, 2.2261045 , 2.46627621, 2.66992747, 2.85807988]) """ return alpha * (-np.log(1 - p**(1./mu)))**(1/beta) def mean(self, alpha, beta, mu): func = lambda x : x * self.df(x, alpha, beta, mu) top = 2 * self.qf(0.999, alpha, beta, mu) return integrate.quadrature(func, 0, top)[0] def random(self, size, alpha, beta, mu): U = uniform.rvs(size=size) return self.qf(U, alpha, beta, mu) def mpp_x_transform(self, x, gamma=0): return np.log(x - gamma) def mpp_y_transform(self, y, *params): mu = params[-1] mask = ((y == 0) | (y == 1)) out = np.zeros_like(y) out[~mask] = np.log(-np.log((1 - y[~mask]**(1./mu)))) out[mask] = np.nan return out def mpp_inv_y_transform(self, y, *params): i = len(params) mu = params[i-1] return (1 - np.exp(-np.exp(y)))**mu def unpack_rr(self, params, rr): #UPDATE ME if rr == 'y': beta = params[0] alpha = np.exp(params[1]/-beta) elif rr == 'x': beta = 1./params[0] alpha = np.exp(params[1] / (beta * params[0])) return alpha, beta, 1. ExpoWeibull = ExpoWeibull_('ExpoWeibull')
2.03125
2
backend/src/baserow/api/user/registries.py
ashishdhngr/baserow
1
2104
from baserow.core.registry import Instance, Registry class UserDataType(Instance): """ The user data type can be used to inject an additional payload to the API JWT response. This is the response when a user authenticates or refreshes his token. The returned dict of the `get_user_data` method is added to the payload under the key containing the type name. Example: class TestUserDataType(UserDataType): type = "test" def get_user_data(user, request): return {"test": "value"} user_data_registry.register(TestUserDataType()) Will result in the following response when the user authenticates: { "token": "<PASSWORD>....", "user": { "id": 1, ... }, "test": { "test": "value" } } """ def get_user_data(self, user, request) -> dict: """ Should return a dict containing the additional information that must be added to the response payload after the user authenticates. :param user: The related user that just authenticated. :type user: User :param request: The request when the user authenticated. :type request: Request :return: a dict containing the user data that must be added to the response. """ raise NotImplementedError( "The get_user_data must be implemented and should return a dict." ) class UserDataRegistry(Registry): name = "api_user_data" def get_all_user_data(self, user, request) -> dict: """ Collects the additional user data of all the registered user data type instances. :param user: The user that just authenticated. :type user: User :param request: The request when the user authenticated. :type request: Request :return: a dict containing all additional user data payload for all the registered instances. """ return { key: value.get_user_data(user, request) for key, value in self.registry.items() } user_data_registry = UserDataRegistry()
1.945313
2
dev/buildtool/metrics.py
premm1983/Spinnaker
0
2112
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Metrics support manager.""" import logging from buildtool import in_memory_metrics from buildtool import prometheus_metrics from buildtool import stackdriver_metrics from buildtool.util import add_parser_argument class MetricsManager(object): """Acts as factory for specialized BaseMetricsRegistry singleton.""" __metrics_registry = None @staticmethod def singleton(): """Returns the BaseMetricsRegistry once startup_metrics is called.""" if MetricsManager.__metrics_registry is None: raise Exception('startup_metrics was not called.') return MetricsManager.__metrics_registry @staticmethod def init_argument_parser(parser, defaults): """Init argparser with metrics-related options.""" in_memory_metrics.init_argument_parser(parser, defaults) prometheus_metrics.init_argument_parser(parser, defaults) stackdriver_metrics.init_argument_parser(parser, defaults) add_parser_argument( parser, 'metric_name_scope', defaults, 'buildtool', help='scope prefix for metrics generated by this tool') add_parser_argument( parser, 'monitoring_enabled', defaults, False, type=bool, help='Enable monitoring to stackdriver.') add_parser_argument( parser, 'monitoring_flush_frequency', defaults, 5, help='Frequency at which to push metrics in seconds.') add_parser_argument( parser, 'monitoring_system', defaults, 'file', choices=['file', 'prometheus', 'stackdriver'], help='Where to store metrics.') @staticmethod def startup_metrics(options): """Startup metrics module with concrete system.""" monitoring_systems = { 'file': in_memory_metrics.InMemoryMetricsRegistry, 'prometheus': prometheus_metrics.PrometheusMetricsRegistry, 'stackdriver': stackdriver_metrics.StackdriverMetricsRegistry } klas = monitoring_systems[options.monitoring_system] logging.info('Initializing monitoring with system="%s"', klas.__name__) MetricsManager.__metrics_registry = klas(options) if options.monitoring_enabled and options.monitoring_flush_frequency > 0: MetricsManager.__metrics_registry.start_pusher_thread() return MetricsManager.__metrics_registry @staticmethod def shutdown_metrics(): """Write final metrics out to metrics server.""" registry = MetricsManager.singleton() registry.stop_pusher_thread() registry.flush_updated_metrics() registry.flush_final_metrics()
1.25
1
game/base/enemy.py
PythonixCoders/PyWeek29
8
2168
#!/usr/bin/env python from game.base.being import Being class Enemy(Being): def __init__(self, app, scene, **kwargs): super().__init__(app, scene, **kwargs) self.friendly = False
1.140625
1
src/entity_linker/models/figer_model/labeling_model.py
mjstrobl/WEXEA
10
2184
""" Modifications copyright (C) 2020 <NAME> """ import time import tensorflow as tf import numpy as np from entity_linker.models.base import Model class LabelingModel(Model): """Unsupervised Clustering using Discrete-State VAE""" def __init__(self, batch_size, num_labels, context_encoded_dim, true_entity_embeddings, word_embed_dim, context_encoded, mention_embed, scope_name, device): self.batch_size = batch_size self.num_labels = num_labels self.word_embed_dim = word_embed_dim with tf.variable_scope(scope_name) as s, tf.device(device) as d: if mention_embed == None: self.label_weights = tf.get_variable( name="label_weights", shape=[context_encoded_dim, num_labels], initializer=tf.random_normal_initializer(mean=0.0, stddev=1.0/(100.0))) else: context_encoded = tf.concat( 1, [context_encoded, mention_embed], name='con_ment_repr') self.label_weights = tf.get_variable( name="label_weights", shape=[context_encoded_dim+word_embed_dim, num_labels], initializer=tf.random_normal_initializer(mean=0.0, stddev=1.0/(100.0))) # [B, L] self.label_scores = tf.matmul(context_encoded, self.label_weights) self.label_probs = tf.sigmoid(self.label_scores) ### PREDICT TYPES FROM ENTITIES #true_entity_embeddings = tf.nn.dropout(true_entity_embeddings, keep_prob=0.5) self.entity_label_scores = tf.matmul(true_entity_embeddings, self.label_weights) self.entity_label_probs = tf.sigmoid(self.label_scores) def loss_graph(self, true_label_ids, scope_name, device_gpu): with tf.variable_scope(scope_name) as s, tf.device(device_gpu) as d: # [B, L] self.cross_entropy_losses = tf.nn.sigmoid_cross_entropy_with_logits( logits=self.label_scores, targets=true_label_ids, name="labeling_loss") self.labeling_loss = tf.reduce_sum( self.cross_entropy_losses) / tf.to_float(self.batch_size) self.enlabel_cross_entropy_losses = tf.nn.sigmoid_cross_entropy_with_logits( logits=self.entity_label_scores, targets=true_label_ids, name="entity_labeling_loss") self.entity_labeling_loss = tf.reduce_sum( self.enlabel_cross_entropy_losses) / tf.to_float(self.batch_size)
2.125
2
GUI Applications/calc.py
jaiswalIT02/pythonprograms
0
2208
from tkinter import Tk from tkinter import Entry from tkinter import Button from tkinter import StringVar t=Tk() t.title("<NAME>") t.geometry("425x300") t.resizable(0,0) t.configure(background="black")#back ground color a=StringVar() def show(c): a.set(a.get()+c) def equal(): x=a.get() a.set(eval(x)) def clear(): a.set("") e1=Entry(font=("",30),justify="right",textvariable=a) e1.place(x=0,y=0,width=425,height=50) b1=Button(text="7",font=("",25),bg="gray",fg="white",activebackground="yellow",command=show) b1.place(x=5,y=55,width=100,height=50) b1.configure(command=lambda:show("7")) b2=Button(text="8",font=("",25),bg="gray",fg="white",activebackground="yellow") b2.place(x=110,y=55,width=100,height=50) b2.configure(command=lambda:show("8")) b3=Button(text="9",font=("",25),bg="gray",fg="white",activebackground="yellow") b3.place(x=215,y=55,width=100,height=50) b3.configure(command=lambda:show("9")) b4=Button(text="+",font=("",25),bg="gray",fg="white",activebackground="yellow") b4.place(x=320,y=55,width=100,height=50) b4.configure(command=lambda:show("+")) b5=Button(text="4",font=("",25),bg="gray",fg="white",activebackground="yellow") b5.place(x=5,y=110,width=100,height=50) b5.configure(command=lambda:show("4")) b6=Button(text="5",font=("",25),bg="gray",fg="white",activebackground="yellow") b6.place(x=110,y=110,width=100,height=50) b6.configure(command=lambda:show("5")) b7=Button(text="6",font=("",25),bg="gray",fg="white",activebackground="yellow") b7.place(x=215,y=110,width=100,height=50) b7.configure(command=lambda:show("6")) b8=Button(text="-",font=("",25),bg="gray",fg="white",activebackground="yellow") b8.place(x=320,y=110,width=100,height=50) b8.configure(command=lambda:show("-")) b9=Button(text="1",font=("",25),bg="gray",fg="white",activebackground="yellow") b9.place(x=5,y=165,width=100,height=50) b9.configure(command=lambda:show("1")) b10=Button(text="2",font=("",25),bg="gray",fg="white",activebackground="yellow") b10.place(x=110,y=165,width=100,height=50) b10.configure(command=lambda:show("2")) b11=Button(text="3",font=("",25),bg="gray",fg="white",activebackground="yellow") b11.place(x=215,y=165,width=100,height=50) b11.configure(command=lambda:show("3")) b12=Button(text="*",font=("",25),bg="gray",fg="white",activebackground="yellow") b12.place(x=320,y=165,width=100,height=50) b12.configure(command=lambda:show("*")) b13=Button(text="C",font=("",25),bg="gray",fg="white",activebackground="yellow") b13.place(x=5,y=220,width=100,height=50) b13.configure(command=clear) b14=Button(text="0",font=("",25),bg="gray",fg="white",activebackground="yellow") b14.place(x=110,y=220,width=100,height=50) b14.configure(command=lambda:show("0")) b15=Button(text="=",font=("",25),bg="gray",fg="white",activebackground="yellow",command=equal) b15.place(x=215,y=220,width=100,height=50) b15.configure(command=equal) b16=Button(text="/",font=("",25),bg="gray",fg="white",activebackground="yellow") b16.place(x=320,y=220,width=100,height=50) b16.configure(command=lambda:show("/")) t.mainloop()
2.171875
2
taskengine/sessions.py
retmas-dv/deftcore
0
2248
__author__ = '<NAME>' from django.contrib.sessions.base_session import AbstractBaseSession from django.contrib.sessions.backends.db import SessionStore as DBStore class CustomSession(AbstractBaseSession): @classmethod def get_session_store_class(cls): return SessionStore class Meta: db_name = 'deft_adcr' db_table = '"ATLAS_DEFT"."DJANGO_SESSION"' class SessionStore(DBStore): @classmethod def get_model_class(cls): return CustomSession
1.023438
1
scrapy_compose/fields/parser/string_field.py
Sphynx-HenryAY/scrapy-compose
0
2288
from scrapy_compose.utils.context import realize from .field import FuncField as BaseField class StringField( BaseField ): process_timing = [ "post_pack" ] def __init__( self, key = None, value = None, selector = None, **kwargs ): #unify value format if isinstance( value, str ): value = { "_type": "string", "value": value } super( StringField, self ).__init__( key = key, value = value, selector = selector, **kwargs ) def make_field( self, selector, key = None, value = None, **kwargs ): return { realize( selector, key ): self.post_pack( realize( selector, value ) ) }
1.398438
1
tensorflow_probability/python/distributions/laplace_test.py
wataruhashimoto52/probability
1
2296
# Copyright 2018 The TensorFlow Probability Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ from __future__ import absolute_import from __future__ import division from __future__ import print_function # Dependency imports import numpy as np from scipy import stats as sp_stats import tensorflow.compat.v2 as tf import tensorflow_probability as tfp from tensorflow_probability.python.internal import samplers from tensorflow_probability.python.internal import test_util tfd = tfp.distributions @test_util.test_all_tf_execution_regimes class LaplaceTest(test_util.TestCase): def testLaplaceShape(self): loc = tf.constant([3.0] * 5) scale = tf.constant(11.0) laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True) self.assertEqual(self.evaluate(laplace.batch_shape_tensor()), (5,)) self.assertEqual(laplace.batch_shape, tf.TensorShape([5])) self.assertAllEqual(self.evaluate(laplace.event_shape_tensor()), []) self.assertEqual(laplace.event_shape, tf.TensorShape([])) def testLaplaceLogPDF(self): batch_size = 6 loc = tf.constant([2.0] * batch_size) scale = tf.constant([3.0] * batch_size) loc_v = 2.0 scale_v = 3.0 x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float32) laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True) log_pdf = laplace.log_prob(x) self.assertEqual(log_pdf.shape, (6,)) expected_log_pdf = sp_stats.laplace.logpdf(x, loc_v, scale=scale_v) self.assertAllClose(self.evaluate(log_pdf), expected_log_pdf) pdf = laplace.prob(x) self.assertEqual(pdf.shape, (6,)) self.assertAllClose(self.evaluate(pdf), np.exp(expected_log_pdf)) def testLaplaceLogPDFMultidimensional(self): batch_size = 6 loc = tf.constant([[2.0, 4.0]] * batch_size) scale = tf.constant([[3.0, 4.0]] * batch_size) loc_v = np.array([2.0, 4.0]) scale_v = np.array([3.0, 4.0]) x = np.array([[2.5, 2.5, 4.0, 0.1, 1.0, 2.0]], dtype=np.float32).T laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True) log_pdf = laplace.log_prob(x) log_pdf_values = self.evaluate(log_pdf) self.assertEqual(log_pdf.shape, (6, 2)) pdf = laplace.prob(x) pdf_values = self.evaluate(pdf) self.assertEqual(pdf.shape, (6, 2)) expected_log_pdf = sp_stats.laplace.logpdf(x, loc_v, scale=scale_v) self.assertAllClose(log_pdf_values, expected_log_pdf) self.assertAllClose(pdf_values, np.exp(expected_log_pdf)) def testLaplaceLogPDFMultidimensionalBroadcasting(self): batch_size = 6 loc = tf.constant([[2.0, 4.0]] * batch_size) scale = tf.constant(3.0) loc_v = np.array([2.0, 4.0]) scale_v = 3.0 x = np.array([[2.5, 2.5, 4.0, 0.1, 1.0, 2.0]], dtype=np.float32).T laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True) log_pdf = laplace.log_prob(x) log_pdf_values = self.evaluate(log_pdf) self.assertEqual(log_pdf.shape, (6, 2)) pdf = laplace.prob(x) pdf_values = self.evaluate(pdf) self.assertEqual(pdf.shape, (6, 2)) expected_log_pdf = sp_stats.laplace.logpdf(x, loc_v, scale=scale_v) self.assertAllClose(log_pdf_values, expected_log_pdf) self.assertAllClose(pdf_values, 
np.exp(expected_log_pdf)) def testLaplaceCDF(self): batch_size = 6 loc = tf.constant([2.0] * batch_size) scale = tf.constant([3.0] * batch_size) loc_v = 2.0 scale_v = 3.0 x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float32) laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True) cdf = laplace.cdf(x) self.assertEqual(cdf.shape, (6,)) expected_cdf = sp_stats.laplace.cdf(x, loc_v, scale=scale_v) self.assertAllClose(self.evaluate(cdf), expected_cdf) def testLaplaceLogCDF(self): batch_size = 6 loc = tf.constant([2.0] * batch_size) scale = tf.constant([3.0] * batch_size) loc_v = 2.0 scale_v = 3.0 x = np.array([-2.5, 2.5, -4.0, 0.1, 1.0, 2.0], dtype=np.float32) laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True) cdf = laplace.log_cdf(x) self.assertEqual(cdf.shape, (6,)) expected_cdf = sp_stats.laplace.logcdf(x, loc_v, scale=scale_v) self.assertAllClose(self.evaluate(cdf), expected_cdf) def testLaplaceQuantile(self): qs = self.evaluate( tf.concat( [[0., 1], samplers.uniform([10], minval=.1, maxval=.9, seed=test_util.test_seed())], axis=0)) d = tfd.Laplace(loc=1., scale=1.3, validate_args=True) vals = d.quantile(qs) self.assertAllClose([-np.inf, np.inf], vals[:2]) self.assertAllClose(qs[2:], d.cdf(vals[2:])) def testLaplaceLogSurvivalFunction(self): batch_size = 6 loc = tf.constant([2.0] * batch_size) scale = tf.constant([3.0] * batch_size) loc_v = 2.0 scale_v = 3.0 x = np.array([-2.5, 2.5, -4.0, 0.1, 1.0, 2.0], dtype=np.float32) laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True) sf = laplace.log_survival_function(x) self.assertEqual(sf.shape, (6,)) expected_sf = sp_stats.laplace.logsf(x, loc_v, scale=scale_v) self.assertAllClose(self.evaluate(sf), expected_sf) def testLaplaceMean(self): loc_v = np.array([1.0, 3.0, 2.5]) scale_v = np.array([1.0, 4.0, 5.0]) laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True) self.assertEqual(laplace.mean().shape, (3,)) expected_means = sp_stats.laplace.mean(loc_v, scale=scale_v) self.assertAllClose(self.evaluate(laplace.mean()), expected_means) def testLaplaceMode(self): loc_v = np.array([0.5, 3.0, 2.5]) scale_v = np.array([1.0, 4.0, 5.0]) laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True) self.assertEqual(laplace.mode().shape, (3,)) self.assertAllClose(self.evaluate(laplace.mode()), loc_v) def testLaplaceVariance(self): loc_v = np.array([1.0, 3.0, 2.5]) scale_v = np.array([1.0, 4.0, 5.0]) laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True) self.assertEqual(laplace.variance().shape, (3,)) expected_variances = sp_stats.laplace.var(loc_v, scale=scale_v) self.assertAllClose(self.evaluate(laplace.variance()), expected_variances) def testLaplaceStd(self): loc_v = np.array([1.0, 3.0, 2.5]) scale_v = np.array([1.0, 4.0, 5.0]) laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True) self.assertEqual(laplace.stddev().shape, (3,)) expected_stddev = sp_stats.laplace.std(loc_v, scale=scale_v) self.assertAllClose(self.evaluate(laplace.stddev()), expected_stddev) def testLaplaceEntropy(self): loc_v = np.array([1.0, 3.0, 2.5]) scale_v = np.array([1.0, 4.0, 5.0]) laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True) self.assertEqual(laplace.entropy().shape, (3,)) expected_entropy = sp_stats.laplace.entropy(loc_v, scale=scale_v) self.assertAllClose(self.evaluate(laplace.entropy()), expected_entropy) def testLaplaceSample(self): loc_v = 4.0 scale_v = 3.0 loc = tf.constant(loc_v) scale = tf.constant(scale_v) n = 100000 laplace = tfd.Laplace(loc=loc, scale=scale, 
validate_args=True) samples = laplace.sample(n, seed=test_util.test_seed()) sample_values = self.evaluate(samples) self.assertEqual(samples.shape, (n,)) self.assertEqual(sample_values.shape, (n,)) self.assertAllClose( sample_values.mean(), sp_stats.laplace.mean(loc_v, scale=scale_v), rtol=0.05, atol=0.) self.assertAllClose( sample_values.var(), sp_stats.laplace.var(loc_v, scale=scale_v), rtol=0.05, atol=0.) self.assertTrue(self._kstest(loc_v, scale_v, sample_values)) def testLaplaceFullyReparameterized(self): loc = tf.constant(4.0) scale = tf.constant(3.0) _, [grad_loc, grad_scale] = tfp.math.value_and_gradient( lambda l, s: tfd.Laplace(loc=l, scale=s, validate_args=True).sample( # pylint: disable=g-long-lambda 100, seed=test_util.test_seed()), [loc, scale]) self.assertIsNotNone(grad_loc) self.assertIsNotNone(grad_scale) def testLaplaceSampleMultiDimensional(self): loc_v = np.array([np.arange(1, 101, dtype=np.float32)]) # 1 x 100 scale_v = np.array([np.arange(1, 11, dtype=np.float32)]).T # 10 x 1 laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True) n = 10000 samples = laplace.sample(n, seed=test_util.test_seed()) sample_values = self.evaluate(samples) self.assertEqual(samples.shape, (n, 10, 100)) self.assertEqual(sample_values.shape, (n, 10, 100)) zeros = np.zeros_like(loc_v + scale_v) # 10 x 100 loc_bc = loc_v + zeros scale_bc = scale_v + zeros self.assertAllClose( sample_values.mean(axis=0), sp_stats.laplace.mean(loc_bc, scale=scale_bc), rtol=0.35, atol=0.) self.assertAllClose( sample_values.var(axis=0), sp_stats.laplace.var(loc_bc, scale=scale_bc), rtol=0.10, atol=0.) fails = 0 trials = 0 for ai, a in enumerate(np.reshape(loc_v, [-1])): for bi, b in enumerate(np.reshape(scale_v, [-1])): s = sample_values[:, bi, ai] trials += 1 fails += 0 if self._kstest(a, b, s) else 1 self.assertLess(fails, trials * 0.03) def _kstest(self, loc, scale, samples): # Uses the Kolmogorov-Smirnov test for goodness of fit. ks, _ = sp_stats.kstest(samples, sp_stats.laplace(loc, scale=scale).cdf) # Return True when the test passes. return ks < 0.02 def testLaplacePdfOfSampleMultiDims(self): laplace = tfd.Laplace(loc=[7., 11.], scale=[[5.], [6.]], validate_args=True) num = 50000 samples = laplace.sample(num, seed=test_util.test_seed()) pdfs = laplace.prob(samples) sample_vals, pdf_vals = self.evaluate([samples, pdfs]) self.assertEqual(samples.shape, (num, 2, 2)) self.assertEqual(pdfs.shape, (num, 2, 2)) self._assertIntegral(sample_vals[:, 0, 0], pdf_vals[:, 0, 0], err=0.02) self._assertIntegral(sample_vals[:, 0, 1], pdf_vals[:, 0, 1], err=0.02) self._assertIntegral(sample_vals[:, 1, 0], pdf_vals[:, 1, 0], err=0.02) self._assertIntegral(sample_vals[:, 1, 1], pdf_vals[:, 1, 1], err=0.02) self.assertAllClose( sp_stats.laplace.mean( [[7., 11.], [7., 11.]], scale=np.array([[5., 5.], [6., 6.]])), sample_vals.mean(axis=0), rtol=0.05, atol=0.) self.assertAllClose( sp_stats.laplace.var([[7., 11.], [7., 11.]], scale=np.array([[5., 5.], [6., 6.]])), sample_vals.var(axis=0), rtol=0.05, atol=0.) 
def _assertIntegral(self, sample_vals, pdf_vals, err=1e-3): s_p = zip(sample_vals, pdf_vals) prev = (0, 0) total = 0 for k in sorted(s_p, key=lambda x: x[0]): pair_pdf = (k[1] + prev[1]) / 2 total += (k[0] - prev[0]) * pair_pdf prev = k self.assertNear(1., total, err=err) def testLaplaceNonPositiveInitializationParamsRaises(self): loc_v = tf.constant(0.0, name='loc') scale_v = tf.constant(-1.0, name='scale') with self.assertRaisesOpError('Argument `scale` must be positive.'): laplace = tfd.Laplace( loc=loc_v, scale=scale_v, validate_args=True) self.evaluate(laplace.mean()) loc_v = tf.constant(1.0, name='loc') scale_v = tf.constant(0.0, name='scale') with self.assertRaisesOpError('Argument `scale` must be positive.'): laplace = tfd.Laplace( loc=loc_v, scale=scale_v, validate_args=True) self.evaluate(laplace.mean()) scale = tf.Variable([1., 2., -3.]) self.evaluate(scale.initializer) with self.assertRaisesOpError('Argument `scale` must be positive.'): d = tfd.Laplace(loc=0, scale=scale, validate_args=True) self.evaluate(d.sample(seed=test_util.test_seed())) def testLaplaceLaplaceKL(self): batch_size = 6 event_size = 3 a_loc = np.array([[0.5] * event_size] * batch_size, dtype=np.float32) a_scale = np.array([[0.1] * event_size] * batch_size, dtype=np.float32) b_loc = np.array([[0.4] * event_size] * batch_size, dtype=np.float32) b_scale = np.array([[0.2] * event_size] * batch_size, dtype=np.float32) a = tfd.Laplace(loc=a_loc, scale=a_scale, validate_args=True) b = tfd.Laplace(loc=b_loc, scale=b_scale, validate_args=True) distance = tf.abs(a_loc - b_loc) ratio = a_scale / b_scale true_kl = (-tf.math.log(ratio) - 1 + distance / b_scale + ratio * tf.exp(-distance / a_scale)) kl = tfd.kl_divergence(a, b) x = a.sample(int(1e4), seed=test_util.test_seed()) kl_sample = tf.reduce_mean(a.log_prob(x) - b.log_prob(x), axis=0) true_kl_, kl_, kl_sample_ = self.evaluate([true_kl, kl, kl_sample]) self.assertAllClose(true_kl_, kl_, atol=1e-5, rtol=1e-5) self.assertAllClose(true_kl_, kl_sample_, atol=0., rtol=1e-1) zero_kl = tfd.kl_divergence(a, a) true_zero_kl_, zero_kl_ = self.evaluate([tf.zeros_like(true_kl), zero_kl]) self.assertAllEqual(true_zero_kl_, zero_kl_) @test_util.tf_tape_safety_test def testGradientThroughParams(self): loc = tf.Variable([-5., 0., 5.]) scale = tf.Variable(2.) d = tfd.Laplace(loc=loc, scale=scale, validate_args=True) with tf.GradientTape() as tape: loss = -d.log_prob([1., 2., 3.]) grad = tape.gradient(loss, d.trainable_variables) self.assertLen(grad, 2) self.assertAllNotNone(grad) def testAssertsPositiveScaleAfterMutation(self): scale = tf.Variable([1., 2., 3.]) d = tfd.Laplace(loc=0., scale=scale, validate_args=True) self.evaluate([v.initializer for v in d.variables]) with self.assertRaisesOpError('Argument `scale` must be positive.'): with tf.control_dependencies([scale.assign([1., 2., -3.])]): self.evaluate(tfd.Laplace(loc=0., scale=1.).kl_divergence(d)) def testAssertParamsAreFloats(self): loc = tf.convert_to_tensor(0, dtype=tf.int32) scale = tf.convert_to_tensor(1, dtype=tf.int32) with self.assertRaisesRegexp(ValueError, 'Expected floating point'): tfd.Laplace(loc=loc, scale=scale) if __name__ == '__main__': tf.test.main()
1.320313
1
design_patterns/pubsub/simple_events/__init__.py
JASTYN/pythonmaster
3
2304
class Event: def __init__(self): self.handlers = set() def subscribe(self, func): self.handlers.add(func) def unsubscribe(self, func): self.handlers.remove(func) def emit(self, *args): for func in self.handlers: func(*args)
1.632813
2
backend/services/apns_util.py
xuantan/viewfinder
645
2320
# -*- coding: utf-8 -*- # Copyright 2012 Viewfinder Inc. All Rights Reserved. """Apple Push Notification service utilities. Original copyright for this code: https://github.com/jayridge/apnstornado TokenToBinary(): converts a hex-encoded token into a binary value CreateMessage(): formats a binary APNs message from parameters ParseResponse(): parses APNs binary response for status & identifier ErrorStatusToString(): converts error status to error message """ __author__ = '<EMAIL> (<NAME>)' import base64 import json import struct import time from tornado import escape _MAX_PAYLOAD_BYTES = 256 """Maximum number of bytes in the APNS payload.""" _ELLIPSIS_BYTES = escape.utf8(u'…') """UTF-8 encoding of the Unicode ellipsis character.""" def TokenToBinary(token): return base64.b64decode(token) def TokenFromBinary(bin_token): return base64.b64encode(bin_token) def CreateMessage(token, alert=None, badge=None, sound=None, identifier=0, expiry=None, extra=None, allow_truncate=True): token = TokenToBinary(token) if len(token) != 32: raise ValueError, u'Token must be a 32-byte binary string.' if (alert is not None) and (not isinstance(alert, (basestring, dict))): raise ValueError, u'Alert message must be a string or a dictionary.' if expiry is None: expiry = long(time.time() + 365 * 86400) # Start by determining the length of the UTF-8 encoded JSON with no alert text. This allows us to # determine how much space is left for the message. # 'content-available': 1 is necessary to trigger iOS 7's background download processing. aps = { 'alert' : '', 'content-available': 1 } if badge is not None: aps['badge'] = badge if sound is not None: aps['sound'] = sound data = { 'aps' : aps } if extra is not None: data.update(extra) # Create compact JSON representation with no extra space and no escaping of non-ascii chars (i.e. use # direct UTF-8 representation rather than "\u1234" escaping). This maximizes the amount of space that's # left for the alert text. encoded = escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',', ':'), ensure_ascii=False)) bytes_left = _MAX_PAYLOAD_BYTES - len(encoded) if allow_truncate and isinstance(alert, basestring): alert = _TruncateAlert(alert, bytes_left) elif alert and len(escape.utf8(alert)) > bytes_left: raise ValueError, u'max payload(%d) exceeded: %d' % (_MAX_PAYLOAD_BYTES, len(escape.utf8(alert))) # Now re-encode including the alert text. aps['alert'] = alert encoded = escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',', ':'), ensure_ascii=False)) length = len(encoded) assert length <= _MAX_PAYLOAD_BYTES, (encoded, length) return struct.pack('!bIIH32sH%(length)ds' % { 'length' : length }, 1, identifier, expiry, 32, token, length, encoded) def ParseResponse(bytes): if len(bytes) != 6: raise ValueError, u'response must be a 6-byte binary string.' command, status, identifier = struct.unpack_from('!bbI', bytes, 0) if command != 8: raise ValueError, u'response command must equal 8.' 
return status, identifier, ErrorStatusToString(status) def ErrorStatusToString(status): if status is 0: return 'No errors encountered' elif status is 1: return 'Processing error' elif status is 2: return 'Missing device token' elif status is 3: return 'Missing topic' elif status is 4: return 'Missing payload' elif status is 5: return 'Invalid token size' elif status is 6: return 'Invalid topic size' elif status is 7: return 'Invalid payload size' elif status is 8: return 'Invalid token' elif status is 255: return 'None (unknown)' else: return '' def _TruncateAlert(alert, max_bytes): """Converts the alert text to UTF-8 encoded JSON format, which is how the alert will be stored in the APNS payload. If the number of resulting bytes exceeds "max_bytes", then truncates the alert text at a Unicode character boundary, taking care not to split JSON escape sequences. Returns the truncated UTF-8 encoded alert text, including a trailing ellipsis character. """ alert_json = escape.utf8(json.dumps(escape.recursive_unicode(alert), ensure_ascii=False)) # Strip quotes added by JSON. alert_json = alert_json[1:-1] # Check if alert fits with no truncation. if len(alert_json) <= max_bytes: return escape.utf8(alert) # Make room for an appended ellipsis. assert max_bytes >= len(_ELLIPSIS_BYTES), 'max_bytes must be at least %d' % len(_ELLIPSIS_BYTES) max_bytes -= len(_ELLIPSIS_BYTES) # Truncate the JSON UTF8 string at a Unicode character boundary. truncated = alert_json[:max_bytes].decode('utf-8', errors='ignore') # If JSON escape sequences were split, then the truncated string may not be valid JSON. Keep # chopping trailing characters until the truncated string is valid JSON. It may take several # tries, such as in the case where a "\u1234" sequence has been split. while True: try: alert = json.loads(u'"%s"' % truncated) break except Exception: truncated = truncated[:-1] # Return the UTF-8 encoding of the alert with the ellipsis appended to it. return escape.utf8(alert) + _ELLIPSIS_BYTES
2.03125
2
ex035A11.py
gabrieleliasdev/python-cev
0
2328
print('\033[0;33;44mTeste\033[m') print('\033[4;33;44mTeste\033[m') print('\033[1;35;43mTeste\033[m') print('\033[7;32;40mTeste\033[m') print('\033[7;30mTeste\033[m') print(" - - - Testando os 40 - - -") print("\033[0;37;40mPreto\033[m") print("\033[0;30;41mVermelho\033[m") print("\033[0;30;42mVerde\033[m") print("\033[0;30;43mAmarelo\033[m") print("\033[0;30;44mRoxo\033[m") print("\033[0;30;45mLilás\033[m") print("\033[0;30;46mTurquesa\033[m") print("\033[0;30;47mBranco\033[m") print("\033[0;36;48mFundo Transparente\033[m") print(" - - - Testando os 30 - - -") print("\033[0;37;40mTeste\033[m") print("\033[0;31;40mTeste\033[m") print("\033[0;32;40mTeste\033[m") print("\033[0;33;40mTeste\033[m") print("\033[0;34;40mTeste\033[m") print("\033[0;35;40mTeste\033[m") print("\033[0;36;40mTeste\033[m") print("\033[0;37;40mTeste\033[m") print("\033[0;38;40mTeste\033[m") print(" - - - Testando os 1ª - - -") print("\033[0;30;47mTeste\033[m") print("\033[1;30;47mTexto em Negrito\033[m") print("\033[2;30;47mTeste\033[m") print("\033[3;30;47mFonta Itálica\033[m") print("\033[4;30;47mSublinhado\033[m") print("\033[5;30;47mTeste\033[m") print("\033[6;30;47mTeste\033[m") print("\033[7;30;47mTeste\033[m") print("\033[7;38;47mTeste\033[m")
0.976563
1
aws_sagemaker_studio/frameworks/tensorflow_mnist/mnist.py
jpmarques19/tensorflwo-test
5
2336
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. """Convolutional Neural Network Estimator for MNIST, built with tf.layers.""" from __future__ import absolute_import, division, print_function import argparse import json import os import numpy as np import tensorflow as tf def cnn_model_fn(features, labels, mode): """Model function for CNN.""" # Input Layer # Reshape X to 4-D tensor: [batch_size, width, height, channels] # MNIST images are 28x28 pixels, and have one color channel input_layer = tf.reshape(features['x'], [-1, 28, 28, 1]) # Convolutional Layer #1 # Computes 32 features using a 5x5 filter with ReLU activation. # Padding is added to preserve width and height. # Input Tensor Shape: [batch_size, 28, 28, 1] # Output Tensor Shape: [batch_size, 28, 28, 32] conv1 = tf.layers.conv2d( inputs=input_layer, filters=32, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling Layer #1 # First max pooling layer with a 2x2 filter and stride of 2 # Input Tensor Shape: [batch_size, 28, 28, 32] # Output Tensor Shape: [batch_size, 14, 14, 32] pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2) # Convolutional Layer #2 # Computes 64 features using a 5x5 filter. # Padding is added to preserve width and height. # Input Tensor Shape: [batch_size, 14, 14, 32] # Output Tensor Shape: [batch_size, 14, 14, 64] conv2 = tf.layers.conv2d( inputs=pool1, filters=64, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling Layer #2 # Second max pooling layer with a 2x2 filter and stride of 2 # Input Tensor Shape: [batch_size, 14, 14, 64] # Output Tensor Shape: [batch_size, 7, 7, 64] pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2) # Flatten tensor into a batch of vectors # Input Tensor Shape: [batch_size, 7, 7, 64] # Output Tensor Shape: [batch_size, 7 * 7 * 64] pool2_flat = tf.reshape(pool2, [-1, 7 * 7 * 64]) # Dense Layer # Densely connected layer with 1024 neurons # Input Tensor Shape: [batch_size, 7 * 7 * 64] # Output Tensor Shape: [batch_size, 1024] dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu) # Add dropout operation; 0.6 probability that element will be kept dropout = tf.layers.dropout( inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN) # Logits layer # Input Tensor Shape: [batch_size, 1024] # Output Tensor Shape: [batch_size, 10] logits = tf.layers.dense(inputs=dropout, units=10) predictions = { # Generate predictions (for PREDICT and EVAL mode) 'classes': tf.argmax(input=logits, axis=1), # Add `softmax_tensor` to the graph. It is used for PREDICT and by the # `logging_hook`. 
'probabilities': tf.nn.softmax(logits, name='softmax_tensor') } if mode == tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) # Calculate Loss (for both TRAIN and EVAL modes) loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) # Configure the Training Op (for TRAIN mode) if mode == tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op = optimizer.minimize( loss=loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) # Add evaluation metrics (for EVAL mode) eval_metric_ops = { 'accuracy': tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])} return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir): x_train = np.load(os.path.join(base_dir, 'train_data.npy')) y_train = np.load(os.path.join(base_dir, 'train_labels.npy')) return x_train, y_train def _load_testing_data(base_dir): x_test = np.load(os.path.join(base_dir, 'eval_data.npy')) y_test = np.load(os.path.join(base_dir, 'eval_labels.npy')) return x_test, y_test def _parse_args(): parser = argparse.ArgumentParser() # Data, model, and output directories. # model_dir is always passed in from SageMaker. # By default this is a S3 path under the default bucket. parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args() def serving_input_fn(): inputs = {'x': tf.placeholder(tf.float32, [None, 784])} return tf.estimator.export.ServingInputReceiver(inputs, inputs) if __name__ == '__main__': args, _ = _parse_args() train_data, train_labels = _load_training_data(args.train) eval_data, eval_labels = _load_testing_data(args.train) # Create the Estimator mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) # Set up logging for predictions # Log the values in the 'Softmax' tensor with label 'probabilities' tensors_to_log = {'probabilities': 'softmax_tensor'} logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) # Train the model train_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': train_data}, y=train_labels, batch_size=100, num_epochs=None, shuffle=True ) # Evaluate the model and print results eval_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': eval_data}, y=eval_labels, num_epochs=1, shuffle=False ) train_spec = tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec = tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec, eval_spec) if args.current_host == args.hosts[0]: mnist_classifier.export_savedmodel(args.sm_model_dir, serving_input_fn)
2.375
2
pytouch/elements.py
Krai53n/pytouch
0
2344
from random import randint import pyxel from constants import Screen import cursors class Text: def __init__(self, text): self._text = text self._symbol_len = 3 self._padding_len = 1 def _count_text_len(self): return ( self._symbol_len + self._padding_len ) * len(self._text) - self._padding_len def _x_text_center_position(self): return (Screen.width - self._count_text_len()) // 2 def draw(self): pyxel.text(self._x_text_center_position(), 0, self._text, 2) class Score: def __init__(self, padding_right=2, padding_top=2): self._padding_right = padding_right self._padding_top = padding_top self.score = 0 def increase(self): self.score += 1 def reduce(self): self.score -= 1 def draw(self): pyxel.text(self._padding_right, self._padding_top, f"Score: {self.score}", (Screen.bg - 2) % 16) class Circle: def __init__(self): self._r = 0 self._col = (Screen.bg - 1) % 16 def zero(self): self._r = 0 def increase(self, size=1): self._r += size @property def r(self): return self._r @r.setter def r(self, r): self._r = r @property def col(self): return self._col @col.setter def col(self, color): self._col = color def draw(self, x, y): pyxel.circ(x, y, self._r, self._col) class ReachCircle(Circle): def __init__(self): super().__init__() self.min_r = 10 self.respawn() @property def x(self): return self._x @property def y(self): return self._y def respawn(self): self._x = randint(self._r, Screen.width - self._r) self._y = randint(self._r, Screen.height - self._r) self._r = randint(self.min_r, min(Screen.width, Screen.height) // 2) - 4 def draw(self): pyxel.circb(self._x, self._y, self._r, self._col)
2.4375
2
ocellaris/solver_parts/boundary_conditions/dirichlet.py
TormodLandet/Ocellaris
1
2352
# SPDX-License-Identifier: Apache-2.0 import dolfin from . import register_boundary_condition, BoundaryConditionCreator from ocellaris.utils import ( CodedExpression, OcellarisCppExpression, OcellarisError, verify_field_variable_definition, ) class OcellarisDirichletBC(dolfin.DirichletBC): def __init__( self, simulation, V, value, subdomain_marker, subdomain_id, updater=None ): """ A simple storage class for Dirichlet boundary conditions """ super().__init__( V, value, subdomain_marker, subdomain_id, method='geometric' ) self.simulation = simulation self._value = value self.subdomain_marker = subdomain_marker self.subdomain_id = subdomain_id self._updater = updater def func(self): """ The boundary value derivative function """ return self._value def ds(self): """ Returns the ds measure of the subdomain """ return self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self, V): """ Return a copy with a new function space. Used when converting from BCs for a segregated solver (default) to BCs for a coupled solver """ return OcellarisDirichletBC( self.simulation, V, self._value, self.subdomain_marker, self.subdomain_id ) def update(self): """ Update the time and other parameters used in the BC. This is used every timestep and for all RK substeps """ if self._updater: self._updater( self.simulation.timestep, self.simulation.time, self.simulation.dt ) def __repr__(self): return '<OcellarisDirichletBC on subdomain %d>' % self.subdomain_id @register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator): description = 'A prescribed constant value Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): """ Dirichlet condition with constant value """ self.simulation = simulation if var_name[-1].isdigit(): # A var_name like "u0" was given. Look up "Vu" self.func_space = simulation.data['V%s' % var_name[:-1]] else: # A var_name like "u" was given. Look up "Vu" self.func_space = simulation.data['V%s' % var_name] value = inp_dict.get_value('value', required_type='any') if isinstance(value, list): assert len(value) == simulation.ndim for d in range(simulation.ndim): name = '%s%d' % (var_name, d) self.register_dirichlet_condition( name, value[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, value, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, value, subdomains, subdomain_id): """ Add a Dirichlet condition to this variable """ if not isinstance(value, (float, int)): raise OcellarisError( 'Error in ConstantValue BC for %s' % var_name, 'The value %r is not a number' % value, ) df_value = dolfin.Constant(value) # Store the boundary condition for use in the solver bc = OcellarisDirichletBC( self.simulation, self.func_space, df_value, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Constant value %r for %s' % (value, var_name)) @register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator): description = 'A coded Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): """ Dirichlet condition with coded value """ self.simulation = simulation if var_name[-1].isdigit(): # A var_name like "u0" was given. Look up "Vu" self.func_space = simulation.data['V%s' % var_name[:-1]] else: # A var_name like "u" was given. 
Look up "Vu" self.func_space = simulation.data['V%s' % var_name] # Make a dolfin Expression object that runs the code string code = inp_dict.get_value('code', required_type='any') if isinstance(code, list): assert len(code) == simulation.ndim for d in range(simulation.ndim): name = '%s%d' % (var_name, d) description = 'coded value boundary condition for %s' % name sub_code = inp_dict.get_value('code/%d' % d, required_type='string') expr = CodedExpression(simulation, sub_code, description) self.register_dirichlet_condition(name, expr, subdomains, subdomain_id) else: description = 'coded value boundary condition for %s' % var_name expr = CodedExpression(simulation, code, description) self.register_dirichlet_condition(var_name, expr, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): """ Store the boundary condition for use in the solver """ bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Coded value for %s' % var_name) @register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator): description = 'A C++ coded Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): """ Dirichlet condition with C++ coded value """ self.simulation = simulation if var_name[-1].isdigit(): # A var_name like "u0" was given. Look up "Vu" self.func_space = simulation.data['V%s' % var_name[:-1]] else: # A var_name like "u" was given. Look up "Vu" self.func_space = simulation.data['V%s' % var_name] # Make a dolfin Expression object that runs the code string code = inp_dict.get_value('cpp_code', required_type='any') if isinstance(code, list): assert len(code) == simulation.ndim for d in range(simulation.ndim): name = '%s%d' % (var_name, d) sub_code = inp_dict.get_value('cpp_code/%d' % d, required_type='string') self.register_dirichlet_condition( name, sub_code, subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, code, subdomains, subdomain_id) def register_dirichlet_condition( self, var_name, cpp_code, subdomains, subdomain_id ): """ Store the boundary condition for use in the solver """ description = 'boundary condititon for %s' % var_name P = self.func_space.ufl_element().degree() expr, updater = OcellarisCppExpression( self.simulation, cpp_code, description, P, return_updater=True ) bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id, updater=updater, ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' C++ coded value for %s' % var_name) @register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition with values from a field function' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): """ Dirichlet boundary condition with value from a field function """ self.simulation = simulation if var_name[-1].isdigit(): # A var_name like "u0" was given. Look up "Vu" self.func_space = simulation.data['V%s' % var_name[:-1]] else: # A var_name like "u" was given. 
Look up "Vu" self.func_space = simulation.data['V%s' % var_name] # Get the field function expression object vardef = inp_dict.get_value('function', required_type='any') description = 'boundary condititon for %s' % var_name if isinstance(vardef, list): assert len(vardef) == simulation.ndim exprs = [ verify_field_variable_definition(simulation, vd, description) for vd in vardef ] else: expr = verify_field_variable_definition(simulation, vardef, description) if expr.ufl_shape != (): assert expr.ufl_shape == ( simulation.ndim, ), 'Expected shape %r got %r' % ((simulation.ndim,), expr.ufl_shape) exprs = [expr[d] for d in range(simulation.ndim)] else: exprs = [expr] # Register BCs if len(exprs) > 1: for d in range(simulation.ndim): name = '%s%d' % (var_name, d) self.register_dirichlet_condition( name, exprs[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition( var_name, exprs[0], subdomains, subdomain_id ) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): """ Store the boundary condition for use in the solver """ assert expr.ufl_shape == () bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field function value for %s' % var_name) @register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition that compensates for non-zero total flux of a known velocity field' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): """ Dirichlet boundary condition with value from a field function """ self.simulation = simulation # A var_name like "u0" should be given. Look up "Vu" self.func_space = simulation.data['V%s' % var_name[:-1]] # Get the field function expression object vardef = inp_dict.get_value('function', required_type='any') description = 'boundary condititon for %s' % var_name self.velocity = verify_field_variable_definition( simulation, vardef, description ) field = simulation.fields[vardef.split('/')[0]] # The expression value is updated as the field is changed inp_dict.get_value('function', required_type='any') field.register_dependent_field(self) self.flux = dolfin.Constant(1.0) # Create the bc = OcellarisDirichletBC( self.simulation, self.func_space, self.flux, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field velocity valve for %s' % var_name) # Compute the region area, then update the flux mesh = simulation.data['mesh'] self.area = dolfin.assemble(self.flux * bc.ds()(domain=mesh)) self.region_names = inp_dict.get_value('regions', required_type='list(string)') self.update() def update(self, timestep_number=None, t=None, dt=None): """ The main field has changed, update our flux to make the total sum to zero """ regions = self.simulation.data['boundary'] mesh = self.simulation.data['mesh'] n = dolfin.FacetNormal(mesh) flux = 0 count = 0 for region in regions: if region.name in self.region_names: f = dolfin.dot(self.velocity, n) * region.ds() flux += dolfin.assemble(f) count += 1 assert count == len(self.region_names) # FIXME: assumes n is pointing outwards along the axis in the positive # direction in this boundary region self.flux.assign(dolfin.Constant(-flux / self.area))
1.773438
2
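The boundary-condition creators above are thin wrappers around standard FEniCS/dolfin objects: an Expression evaluated on a marked part of the boundary, held by OcellarisDirichletBC for the solver. A minimal sketch of the same idea in plain dolfin, with the mesh, marker id and C++ value string invented for illustration (the simulation object, CodedExpression and OcellarisCppExpression are not reproduced here):

import dolfin

# Stand-ins for what Ocellaris keeps in simulation.data
mesh = dolfin.UnitSquareMesh(8, 8)
V = dolfin.FunctionSpace(mesh, 'CG', 2)

# Mark the x = 0 part of the boundary with id 1 (arbitrary for this sketch)
subdomains = dolfin.MeshFunction('size_t', mesh, mesh.topology().dim() - 1, 0)
dolfin.CompiledSubDomain('near(x[0], 0.0) && on_boundary').mark(subdomains, 1)

# A C++ coded value compiled to the degree of the target function space,
# mirroring what CppCodedDirichletBoundary does through OcellarisCppExpression
degree = V.ufl_element().degree()
expr = dolfin.Expression('sin(pi*x[1]) * t', t=0.0, degree=degree)

# Ocellaris wraps this in OcellarisDirichletBC and stores it under
# simulation.data['dirichlet_bcs'][var_name]
bc = dolfin.DirichletBC(V, expr, subdomains, 1)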
moto/dynamodbstreams/responses.py
jonnangle/moto-1
3
2368
from __future__ import unicode_literals

from moto.core.responses import BaseResponse

from .models import dynamodbstreams_backends
from six import string_types


class DynamoDBStreamsHandler(BaseResponse):
    @property
    def backend(self):
        return dynamodbstreams_backends[self.region]

    def describe_stream(self):
        arn = self._get_param("StreamArn")
        return self.backend.describe_stream(arn)

    def list_streams(self):
        table_name = self._get_param("TableName")
        return self.backend.list_streams(table_name)

    def get_shard_iterator(self):
        arn = self._get_param("StreamArn")
        shard_id = self._get_param("ShardId")
        shard_iterator_type = self._get_param("ShardIteratorType")
        sequence_number = self._get_param("SequenceNumber")
        # according to documentation sequence_number param should be string
        if isinstance(sequence_number, string_types):
            sequence_number = int(sequence_number)

        return self.backend.get_shard_iterator(
            arn, shard_id, shard_iterator_type, sequence_number
        )

    def get_records(self):
        arn = self._get_param("ShardIterator")
        limit = self._get_param("Limit")
        if limit is None:
            limit = 1000
        return self.backend.get_records(arn, limit)
1.351563
1
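A rough client-side sketch of the handler above, assuming moto exposes a mock_dynamodbstreams decorator for this backend (as it does for its other services); with no tables created, the call is dispatched through DynamoDBStreamsHandler.list_streams and the backend returns an empty stream list:

import boto3
from moto import mock_dynamodbstreams  # assumed decorator name


@mock_dynamodbstreams
def list_streams_example():
    client = boto3.client("dynamodbstreams", region_name="us-east-1")
    # Routed to DynamoDBStreamsHandler.list_streams -> backend.list_streams
    return client.list_streams()


print(list_streams_example())  # expect an empty 'Streams' list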
var/spack/repos/builtin/packages/strumpack/package.py
robertodr/spack
9
2384
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Strumpack(CMakePackage, CudaPackage): """STRUMPACK -- STRUctured Matrix PACKage - provides linear solvers for sparse matrices and for dense rank-structured matrices, i.e., matrices that exhibit some kind of low-rank property. It provides a distributed memory fully algebraic sparse solver and preconditioner. The preconditioner is mostly aimed at large sparse linear systems which result from the discretization of a partial differential equation, but is not limited to any particular type of problem. STRUMPACK also provides preconditioned GMRES and BiCGStab iterative solvers.""" homepage = "http://portal.nersc.gov/project/sparse/strumpack" url = "https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz" git = "https://github.com/pghysels/STRUMPACK.git" maintainers = ['pghysels'] version('master', branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False, description='Build shared libraries') variant('mpi', default=True, description='Use MPI') variant('openmp', default=True, description='Enable thread parallellism via tasking with OpenMP') variant('cuda', default=True, description='Enable CUDA support') variant('parmetis', default=True, description='Enable use of ParMetis') variant('scotch', default=False, description='Enable use of Scotch') variant('butterflypack', default=True, description='Enable use of ButterflyPACK') variant('zfp', default=True, description='Build with support for compression using ZFP') variant('c_interface', default=True, description='Enable C interface') variant('count_flops', default=False, description='Build with flop counters') variant('task_timers', default=False, description='Build with timers for internal routines') variant('build_dev_tests', default=False, description='Build developer test routines') variant('build_tests', default=False, description='Build test routines') # TODO: add a slate variant depends_on('[email protected]:', type='build') depends_on('mpi', when='+mpi') depends_on('blas') depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('metis') depends_on('parmetis', when='+parmetis') depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('[email protected]', when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('[email protected]:', when='@4.0.0: +butterflypack+mpi') depends_on('cuda', when='@4.0.0: +cuda') depends_on('zfp', when='+zfp') conflicts('+parmetis', when='~mpi') conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda', when='@:3.9.999') conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1') def cmake_args(self): spec = self.spec def on_off(varstr): return 'ON' if varstr in spec else 'OFF' args = [ '-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'), 
'-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(";"), '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(";"), '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack']. libs.joined(";"), ] if spec.satisfies('@:3.9.999'): if '+mpi' in spec: args.extend([ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc ]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'), ]) if spec.satisfies('@4.0.0:'): args.extend([ '-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda') ]) args.extend([ '-DBUILD_SHARED_LIBS=%s' % on_off('+shared') ]) return args
1.40625
1
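Most of cmake_args above is a mechanical mapping from Spack variants to -D<FLAG>=ON/OFF definitions through the local on_off helper. A standalone sketch of that pattern, with the spec replaced by a plain set of enabled variant names:

def variant_flags(enabled_variants, mapping):
    """Turn a set of enabled variants into CMake -DNAME=ON/OFF arguments."""
    def on_off(variant):
        return 'ON' if variant in enabled_variants else 'OFF'
    return ['-D%s=%s' % (define, on_off(variant))
            for variant, define in mapping.items()]


# Example: only +mpi and +openmp enabled
print(variant_flags(
    {'+mpi', '+openmp'},
    {'+mpi': 'STRUMPACK_USE_MPI',
     '+openmp': 'STRUMPACK_USE_OPENMP',
     '+cuda': 'STRUMPACK_USE_CUDA'}))
# ['-DSTRUMPACK_USE_MPI=ON', '-DSTRUMPACK_USE_OPENMP=ON', '-DSTRUMPACK_USE_CUDA=OFF']

From the command line the same variants are toggled with the usual spec syntax, e.g. spack install strumpack+mpi+openmp~cuda.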
delete.py
lvwuyunlifan/crop
0
2424
import os from PIL import Image, ImageFilter import matplotlib.pyplot as plt import matplotlib.image as mpimg # import seaborn as sns import pandas as pd import numpy as np import random train_path = './AgriculturalDisease_trainingset/' valid_path = './AgriculturalDisease_validationset/' def genImage(gpath, datatype): if datatype == 'train': gen_number = 0 # 统计生成的图片数量 if not os.path.exists(gpath+'delete'): os.makedirs(gpath+'delete') label = pd.read_csv(gpath + 'label.csv') label_gen_dict = {'img_path':[], 'label':[]} # 生成图片label for i in range(61): li = label[label['label'] == i] imagenum = li['label'].count() print('第%d个,总共有有%d个图片'%(i, imagenum)) imagelist = np.array(li['img_path']).tolist() img_path_gen, label_gen = [], [] # for imagefile in imagelist: for aa in range(len(imagelist)): if aa <= 40: print(aa) path, imagename = os.path.split(imagelist[aa]) im = Image.open(imagelist[aa]) im = im.convert('RGB') im_detail = im.transpose(Image.ROTATE_180) # im_detail = im.filter(ImageFilter.DETAIL) # 细节增强 img_path_gen.append(gpath + 'delete/' +'idetail_'+imagename) label_gen.extend([int(i)]) im_detail.save(gpath + 'delete/' +'idetail_'+imagename) gen_number += 1 label_dict = {'img_path':img_path_gen, 'label':label_gen} label_gen_dict['img_path'].extend(img_path_gen) label_gen_dict['label'].extend(label_gen) label_gen_pd = pd.DataFrame(label_dict) # label = label.append(label_gen_pd) # 将生成的图片label加入原先的label # label['label'] = label[['label']].astype('int64') # 转化为int64 # print(label) label_gen_p = pd.DataFrame(label_gen_dict) label_gen_p.to_csv(gpath + 'label_delete.csv', index=False) # label_gen_p = pd.DataFrame(label_gen_dict) # label_gen_p.to_csv(gpath + 'label_gen.csv', index=False) print('训练集总共生成%d个图片'%gen_number) if datatype == 'valid': gen_number = 0 if not os.path.exists(gpath+'delete'): os.makedirs(gpath+'delete') label = pd.read_csv(gpath + 'label.csv') label_gen_dict = {'img_path':[], 'label':[]} for i in range(61): li = label[label['label'] == i] imagenum = li['label'].count() print('第%d个,总共有有%d个图片'%(i, imagenum)) imagelist = np.array(li['img_path']).tolist() img_path_gen, label_gen = [], [] # for imagefile in imagelist: for aa in range(len(imagelist)): if aa <= 20: print(aa) path, imagename = os.path.split(imagelist[aa]) im = Image.open(imagelist[aa]) im = im.convert('RGB') im_detail = im.transpose(Image.ROTATE_180) #im_detail = im.filter(ImageFilter.DETAIL) # 细节增强 img_path_gen.append(gpath + 'delete/' + 'idetail_' + imagename) label_gen.extend([int(i)]) im_detail.save(gpath + 'delete/' + 'idetail_' + imagename) gen_number += 1 label_dict = {'img_path': img_path_gen, 'label': label_gen} label_gen_dict['img_path'].extend(img_path_gen) label_gen_dict['label'].extend(label_gen) label_gen_pd = pd.DataFrame(label_dict) # label = label.append(label_gen_pd) # 将生成的图片label加入原先的label # label['label'] = label[['label']].astype('int64') # 转化为int64 # print(label) label_gen_p = pd.DataFrame(label_gen_dict) label_gen_p.to_csv(gpath + 'label_delete.csv', index=False) print('验证集总共生成%d个图片'%gen_number) if __name__ == '__main__': genImage(train_path, 'train') genImage(valid_path, 'valid')
2.09375
2
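The augmentation loop above reduces to: open the image, force RGB, rotate it 180 degrees, save it under a prefixed name in a 'delete' subdirectory, and record the new path/label pair. A minimal sketch of that per-image step, with hypothetical paths:

import os
from PIL import Image


def augment_rotate180(src_path, out_dir, prefix='idetail_'):
    """Save a 180-degree rotated RGB copy of src_path into out_dir; return the new path."""
    os.makedirs(out_dir, exist_ok=True)
    name = os.path.basename(src_path)
    im = Image.open(src_path).convert('RGB')
    rotated = im.transpose(Image.ROTATE_180)
    out_path = os.path.join(out_dir, prefix + name)
    rotated.save(out_path)
    return out_path


# Hypothetical usage, matching the directory layout used above:
# augment_rotate180('./AgriculturalDisease_trainingset/images/leaf_0001.jpg',
#                   './AgriculturalDisease_trainingset/delete')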
yue/core/explorer/ftpsource.py
nsetzer/YueMusicPlayer
0
2432
from ftplib import FTP,error_perm, all_errors import posixpath from io import BytesIO,SEEK_SET from .source import DataSource import sys import re reftp = re.compile('(ssh|ftp)\:\/\/(([^@:]+)?:?([^@]+)?@)?([^:]+)(:[0-9]+)?\/(.*)') def parseFTPurl( url ): m = reftp.match( url ) if m: g = m.groups() result = { "mode" : g[0], "username" : g[2] or "", "password" : g[3] or "", "hostname" : g[4] or "", "port" : int(g[5][1:]) if g[5] else 0, "path" : g[6] or "/", } if result['port'] == 0: if result['mode'] == ssh: result['port'] = 22 else: result['port'] = 21 # ftp port default return result raise ValueError("invalid: %s"%url) def utf8_fix(s): return ''.join([ a if ord(a)<128 else "%02X"%ord(a) for a in s]) class FTPWriter(object): """docstring for FTPWriter""" def __init__(self, ftp, path): super(FTPWriter, self).__init__() self.ftp = ftp self.path = path self.file = BytesIO() def write(self,data): return self.file.write(data) def seek(self,pos,whence=SEEK_SET): return self.file.seek(pos,whence) def tell(self): return self.file.tell() def close(self): self.file.seek(0) text = "STOR " + utf8_fix(self.path) self.ftp.storbinary(text, self.file) def __enter__(self): return self def __exit__(self,typ,val,tb): if typ is None: self.close() class FTPReader(object): """docstring for FTPWriter""" def __init__(self, ftp, path): super(FTPReader, self).__init__() self.ftp = ftp self.path = path self.file = BytesIO() # open the file text = "RETR " + utf8_fix(self.path) self.ftp.retrbinary(text, self.file.write) self.file.seek(0) def read(self,n=None): return self.file.read(n) def seek(self,pos,whence=SEEK_SET): return self.file.seek(pos,whence) def tell(self): return self.file.tell() def close(self): self.file.close() def __enter__(self): return self def __exit__(self,typ,val,tb): if typ is None: self.close() class FTPSource(DataSource): """ there is some sort of problem with utf-8/latin-1 and ftplib storbinary must accepts a STRING, since it builds a cmd and add the CRLF to the input argument using the plus operator. the command fails when given unicode text (ord > 127) and also fails whenm given a byte string. """ # TODO: turn this into a directory generator # which first loads the directory, then loops over # loaded items. # TODO: on windows we need a way to view available # drive letters def __init__(self, host, port, username="", password=""): super(FTPSource, self).__init__() self.ftp = FTP() self.ftp.connect(host,port) self.ftp.login(username,password) self.hostname = "%s:%d"%(host,port) def root(self): return "/" def close(self): try: self.ftp.quit() except all_errors as e: sys.stderr.write("Error Closing FTP connection\n") sys.stderr.write("%s\n"%e) super().close() def fix(self, path): return utf8_fix(path) def join(self,*args): return posixpath.join(*args) def breakpath(self,path): return [ x for x in path.replace("/","\\").split("\\") if x ] def relpath(self,path,base): return posixpath.relpath(path,base) def normpath(self,path,root=None): if root and not path.startswith("/"): path = posixpath.join(root,path) return posixpath.normpath( path ) def listdir(self,path): return self.ftp.nlst(path) def parent(self,path): # TODO: if path is C:\\ return empty string ? 
# empty string returns drives p,_ = posixpath.split(path) return p def move(self,oldpath,newpath): self.ftp.rename(oldpath,newpath) def delete(self,path): # todo support removing directory rmdir() path = utf8_fix(path) if self.exists( path ): if self.isdir(path): try: self.ftp.rmd(path) except Exception as e: print("ftp delete error: %s"%e) else: try: self.ftp.delete(path) except Exception as e: print("ftp delete error: %s"%e) def open(self,path,mode): if mode=="wb": return FTPWriter(self.ftp,path) elif mode=="rb": return FTPReader(self.ftp,path) raise NotImplementedError(mode) def exists(self,path): path = utf8_fix(path) p,n=posixpath.split(path) lst = set(self.listdir(p)) return n in lst def isdir(self,path): path = utf8_fix(path) try: return self.ftp.size(path) is None except error_perm: # TODO: to think about more later, # under my use-case, I'm only asking if a path is a directory # if I Already think it exists. Under the current FTP impl # ftp.size() fails for various reasons unless the file exists # and is an accessable file. I can infer that a failure to # determine the size means that the path is a directory, # but this does not hold true under other use cases. # I can't cache listdir calls, but if I could, then I could # use that to determine if the file exists return True#self.exists( path ) def mkdir(self,path): # this is a really ugly quick and dirty solution path = utf8_fix(path) if not self.exists(path): p = self.parent( path ) try: if not self.exists(p): self.ftp.mkd( p ) self.ftp.mkd(path) except Exception as e: print("ftp mkd error: %s"%e) def split(self,path): return posixpath.split(path) def splitext(self,path): return posixpath.splitext(path) def stat(self,path): try: size = self.ftp.size(path) except error_perm: size = None result = { "isDir" : size is None, "isLink": False, "mtime" : 0, "ctime" : 0, "size" : size or 0, "name" : self.split(path)[1], "mode" : 0 } return result def stat_fast(self,path): # not fast for thus file system :( try: size = self.ftp.size(path) except error_perm: size = None result = { "name" : self.split(path)[1], "size" : size or 0, "isDir" : size is None, "isLink" : False, } return result def chmod(self,path,mode): print("chmod not implemented") def getExportPath(self,path): return self.hostname+path
1.945313
2
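parseFTPurl above is easiest to read from a concrete input. Note that its default-port branch compares result['mode'] against a bare name ssh rather than the string 'ssh', so as written a URL with no explicit port raises NameError; the sketch below therefore spells the port out (hostname and credentials are made up):

# Assuming parseFTPurl and FTPSource are importable from ftpsource.py
info = parseFTPurl('ftp://alice:secret@ftp.example.com:2121/music/library')
# info == {'mode': 'ftp', 'username': 'alice', 'password': 'secret',
#          'hostname': 'ftp.example.com', 'port': 2121, 'path': 'music/library'}

# Opening a connection with the parsed pieces (would contact a real server):
source = FTPSource(info['hostname'], info['port'],
                   username=info['username'], password=info['password'])
print(source.listdir(info['path']))
source.close()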
tests/test_missing_process.py
ricklupton/sphinx_probs_rdf
1
2472
import pytest
from rdflib import Graph, Namespace, Literal
from rdflib.namespace import RDF, RDFS

from sphinx_probs_rdf.directives import PROBS

SYS = Namespace("http://example.org/system/")


@pytest.mark.sphinx(
    'probs_rdf', testroot='missing',
    confoverrides={'probs_rdf_system_prefix': str(SYS)})
def test_builder_reports_warning_for_missing_process(app, status, warning):
    app.builder.build_all()

    assert "build succeeded" not in status.getvalue()

    warnings = warning.getvalue().strip()
    assert 'WARNING: Requested child "http://example.org/system/Missing" of "http://example.org/system/ErrorMissingProcess" is not a Process' in warnings
1.1875
1
scattering/van_hove.py
XiaoboLinlin/scattering
0
2480
import itertools as it import numpy as np import mdtraj as md from progressbar import ProgressBar from scattering.utils.utils import get_dt from scattering.utils.constants import get_form_factor def compute_van_hove(trj, chunk_length, water=False, r_range=(0, 1.0), bin_width=0.005, n_bins=None, self_correlation=True, periodic=True, opt=True, partial=False): """Compute the partial van Hove function of a trajectory Parameters ---------- trj : mdtraj.Trajectory trajectory on which to compute the Van Hove function chunk_length : int length of time between restarting averaging water : bool use X-ray form factors for water that account for polarization r_range : array-like, shape=(2,), optional, default=(0.0, 1.0) Minimum and maximum radii. bin_width : float, optional, default=0.005 Width of the bins in nanometers. n_bins : int, optional, default=None The number of bins. If specified, this will override the `bin_width` parameter. self_correlation : bool, default=True Whether or not to include the self-self correlations Returns ------- r : numpy.ndarray r positions generated by histogram binning g_r_t : numpy.ndarray Van Hove function at each time and position """ n_physical_atoms = len([a for a in trj.top.atoms if a.element.mass > 0]) unique_elements = list(set([a.element for a in trj.top.atoms if a.element.mass > 0])) partial_dict = dict() for elem1, elem2 in it.combinations_with_replacement(unique_elements[::-1], 2): print('doing {0} and {1} ...'.format(elem1, elem2)) r, g_r_t_partial = compute_partial_van_hove(trj=trj, chunk_length=chunk_length, selection1='element {}'.format(elem1.symbol), selection2='element {}'.format(elem2.symbol), r_range=r_range, bin_width=bin_width, n_bins=n_bins, self_correlation=self_correlation, periodic=periodic, opt=opt) partial_dict[(elem1, elem2)] = g_r_t_partial if partial: return partial_dict norm = 0 g_r_t = None for key, val in partial_dict.items(): elem1, elem2 = key concentration1 = trj.atom_slice(trj.top.select('element {}'.format(elem1.symbol))).n_atoms / n_physical_atoms concentration2 = trj.atom_slice(trj.top.select('element {}'.format(elem2.symbol))).n_atoms / n_physical_atoms form_factor1 = get_form_factor(element_name=elem1.symbol, water=water) form_factor2 = get_form_factor(element_name=elem2.symbol, water=water) coeff = form_factor1 * concentration1 * form_factor2 * concentration2 if g_r_t is None: g_r_t = np.zeros_like(val) g_r_t += val * coeff norm += coeff # Reshape g_r_t to better represent the discretization in both r and t g_r_t_final = np.empty(shape=(chunk_length, len(r))) for i in range(chunk_length): g_r_t_final[i, :] = np.mean(g_r_t[i::chunk_length], axis=0) g_r_t_final /= norm t = trj.time[:chunk_length] return r, t, g_r_t_final def compute_partial_van_hove(trj, chunk_length=10, selection1=None, selection2=None, r_range=(0, 1.0), bin_width=0.005, n_bins=200, self_correlation=True, periodic=True, opt=True): """Compute the partial van Hove function of a trajectory Parameters ---------- trj : mdtraj.Trajectory trajectory on which to compute the Van Hove function chunk_length : int length of time between restarting averaging selection1 : str selection to be considered, in the style of MDTraj atom selection selection2 : str selection to be considered, in the style of MDTraj atom selection r_range : array-like, shape=(2,), optional, default=(0.0, 1.0) Minimum and maximum radii. bin_width : float, optional, default=0.005 Width of the bins in nanometers. n_bins : int, optional, default=None The number of bins. 
If specified, this will override the `bin_width` parameter. self_correlation : bool, default=True Whether or not to include the self-self correlations Returns ------- r : numpy.ndarray r positions generated by histogram binning g_r_t : numpy.ndarray Van Hove function at each time and position """ unique_elements = ( set([a.element for a in trj.atom_slice(trj.top.select(selection1)).top.atoms]), set([a.element for a in trj.atom_slice(trj.top.select(selection2)).top.atoms]), ) if any([len(val) > 1 for val in unique_elements]): raise UserWarning( 'Multiple elements found in a selection(s). Results may not be ' 'direcitly comprable to scattering experiments.' ) # Don't need to store it, but this serves to check that dt is constant dt = get_dt(trj) pairs = trj.top.select_pairs(selection1=selection1, selection2=selection2) n_chunks = int(trj.n_frames / chunk_length) g_r_t = None pbar = ProgressBar() for i in pbar(range(n_chunks)): times = list() for j in range(chunk_length): times.append([chunk_length*i, chunk_length*i+j]) r, g_r_t_frame = md.compute_rdf_t( traj=trj, pairs=pairs, times=times, r_range=r_range, bin_width=bin_width, n_bins=n_bins, period_length=chunk_length, self_correlation=self_correlation, periodic=periodic, opt=opt, ) if g_r_t is None: g_r_t = np.zeros_like(g_r_t_frame) g_r_t += g_r_t_frame return r, g_r_t
2.015625
2
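compute_van_hove above takes an mdtraj trajectory plus a chunk length and averages G(r, t) over windows of that many frames. A hedged usage sketch with made-up file names (and assuming an mdtraj build that provides compute_rdf_t, which compute_partial_van_hove calls):

import mdtraj as md
from scattering.van_hove import compute_van_hove

trj = md.load('trajectory.xtc', top='topology.pdb')  # hypothetical input files

# r is in nm; t is taken from the first chunk_length frames of the trajectory
r, t, g_r_t = compute_van_hove(trj, chunk_length=10, water=True)

print(g_r_t.shape)  # (chunk_length, len(r))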
astroplan/constraints.py
edose/astroplan
160
2488
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Specify and constraints to determine which targets are observable for an observer. """ from __future__ import (absolute_import, division, print_function, unicode_literals) # Standard library from abc import ABCMeta, abstractmethod import datetime import time import warnings # Third-party from astropy.time import Time import astropy.units as u from astropy.coordinates import get_body, get_sun, get_moon, Galactic, SkyCoord from astropy import table import numpy as np from numpy.lib.stride_tricks import as_strided # Package from .moon import moon_illumination from .utils import time_grid_from_range from .target import get_skycoord __all__ = ["AltitudeConstraint", "AirmassConstraint", "AtNightConstraint", "is_observable", "is_always_observable", "time_grid_from_range", "GalacticLatitudeConstraint", "SunSeparationConstraint", "MoonSeparationConstraint", "MoonIlluminationConstraint", "LocalTimeConstraint", "PrimaryEclipseConstraint", "SecondaryEclipseConstraint", "Constraint", "TimeConstraint", "observability_table", "months_observable", "max_best_rescale", "min_best_rescale", "PhaseConstraint", "is_event_observable"] _current_year = time.localtime().tm_year # needed for backward compatibility _current_year_time_range = Time( # needed for backward compatibility [str(_current_year) + '-01-01', str(_current_year) + '-12-31'] ) def _make_cache_key(times, targets): """ Make a unique key to reference this combination of ``times`` and ``targets``. Often, we wish to store expensive calculations for a combination of ``targets`` and ``times`` in a cache on an ``observer``` object. This routine will provide an appropriate, hashable, key to store these calculations in a dictionary. Parameters ---------- times : `~astropy.time.Time` Array of times on which to test the constraint. targets : `~astropy.coordinates.SkyCoord` Target or list of targets. Returns ------- cache_key : tuple A hashable tuple for use as a cache key """ # make a tuple from times try: timekey = tuple(times.jd) + times.shape except BaseException: # must be scalar timekey = (times.jd,) # make hashable thing from targets coords try: if hasattr(targets, 'frame'): # treat as a SkyCoord object. Accessing the longitude # attribute of the frame data should be unique and is # quicker than accessing the ra attribute. targkey = tuple(targets.frame.data.lon.value.ravel()) + targets.shape else: # assume targets is a string. targkey = (targets,) except BaseException: targkey = (targets.frame.data.lon,) return timekey + targkey def _get_altaz(times, observer, targets, force_zero_pressure=False): """ Calculate alt/az for ``target`` at times linearly spaced between the two times in ``time_range`` with grid spacing ``time_resolution`` for ``observer``. Cache the result on the ``observer`` object. Parameters ---------- times : `~astropy.time.Time` Array of times on which to test the constraint. targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`} Target or list of targets. observer : `~astroplan.Observer` The observer who has constraints ``constraints``. force_zero_pressure : bool Forcefully use 0 pressure. Returns ------- altaz_dict : dict Dictionary containing two key-value pairs. (1) 'times' contains the times for the alt/az computations, (2) 'altaz' contains the corresponding alt/az coordinates at those times. 
""" if not hasattr(observer, '_altaz_cache'): observer._altaz_cache = {} # convert times, targets to tuple for hashing aakey = _make_cache_key(times, targets) if aakey not in observer._altaz_cache: try: if force_zero_pressure: observer_old_pressure = observer.pressure observer.pressure = 0 altaz = observer.altaz(times, targets, grid_times_targets=False) observer._altaz_cache[aakey] = dict(times=times, altaz=altaz) finally: if force_zero_pressure: observer.pressure = observer_old_pressure return observer._altaz_cache[aakey] def _get_moon_data(times, observer, force_zero_pressure=False): """ Calculate moon altitude az and illumination for an array of times for ``observer``. Cache the result on the ``observer`` object. Parameters ---------- times : `~astropy.time.Time` Array of times on which to test the constraint. observer : `~astroplan.Observer` The observer who has constraints ``constraints``. force_zero_pressure : bool Forcefully use 0 pressure. Returns ------- moon_dict : dict Dictionary containing three key-value pairs. (1) 'times' contains the times for the computations, (2) 'altaz' contains the corresponding alt/az coordinates at those times and (3) contains the moon illumination for those times. """ if not hasattr(observer, '_moon_cache'): observer._moon_cache = {} # convert times to tuple for hashing aakey = _make_cache_key(times, 'moon') if aakey not in observer._moon_cache: try: if force_zero_pressure: observer_old_pressure = observer.pressure observer.pressure = 0 altaz = observer.moon_altaz(times) illumination = np.array(moon_illumination(times)) observer._moon_cache[aakey] = dict(times=times, illum=illumination, altaz=altaz) finally: if force_zero_pressure: observer.pressure = observer_old_pressure return observer._moon_cache[aakey] def _get_meridian_transit_times(times, observer, targets): """ Calculate next meridian transit for an array of times for ``targets`` and ``observer``. Cache the result on the ``observer`` object. Parameters ---------- times : `~astropy.time.Time` Array of times on which to test the constraint observer : `~astroplan.Observer` The observer who has constraints ``constraints`` targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`} Target or list of targets Returns ------- time_dict : dict Dictionary containing a key-value pair. 'times' contains the meridian_transit times. """ if not hasattr(observer, '_meridian_transit_cache'): observer._meridian_transit_cache = {} # convert times to tuple for hashing aakey = _make_cache_key(times, targets) if aakey not in observer._meridian_transit_cache: meridian_transit_times = observer.target_meridian_transit_time(times, targets) observer._meridian_transit_cache[aakey] = dict(times=meridian_transit_times) return observer._meridian_transit_cache[aakey] @abstractmethod class Constraint(object): """ Abstract class for objects defining observational constraints. """ __metaclass__ = ABCMeta def __call__(self, observer, targets, times=None, time_range=None, time_grid_resolution=0.5*u.hour, grid_times_targets=False): """ Compute the constraint for this class Parameters ---------- observer : `~astroplan.Observer` the observation location from which to apply the constraints targets : sequence of `~astroplan.Target` The targets on which to apply the constraints. times : `~astropy.time.Time` The times to compute the constraint. WHAT HAPPENS WHEN BOTH TIMES AND TIME_RANGE ARE SET? time_range : `~astropy.time.Time` (length = 2) Lower and upper bounds on time sequence. 
time_grid_resolution : `~astropy.units.quantity` Time-grid spacing grid_times_targets : bool if True, grids the constraint result with targets along the first index and times along the second. Otherwise, we rely on broadcasting the shapes together using standard numpy rules. Returns ------- constraint_result : 1D or 2D array of float or bool The constraints. If 2D with targets along the first index and times along the second. """ if times is None and time_range is not None: times = time_grid_from_range(time_range, time_resolution=time_grid_resolution) if grid_times_targets: targets = get_skycoord(targets) # TODO: these broadcasting operations are relatively slow # but there is potential for huge speedup if the end user # disables gridding and re-shapes the coords themselves # prior to evaluating multiple constraints. if targets.isscalar: # ensure we have a (1, 1) shape coord targets = SkyCoord(np.tile(targets, 1))[:, np.newaxis] else: targets = targets[..., np.newaxis] times, targets = observer._preprocess_inputs(times, targets, grid_times_targets=False) result = self.compute_constraint(times, observer, targets) # make sure the output has the same shape as would result from # broadcasting times and targets against each other if targets is not None: # broadcasting times v targets is slow due to # complex nature of these objects. We make # to simple numpy arrays of the same shape and # broadcast these to find the correct shape shp1, shp2 = times.shape, targets.shape x = np.array([1]) a = as_strided(x, shape=shp1, strides=[0] * len(shp1)) b = as_strided(x, shape=shp2, strides=[0] * len(shp2)) output_shape = np.broadcast(a, b).shape if output_shape != np.array(result).shape: result = np.broadcast_to(result, output_shape) return result @abstractmethod def compute_constraint(self, times, observer, targets): """ Actually do the real work of computing the constraint. Subclasses override this. Parameters ---------- times : `~astropy.time.Time` The times to compute the constraint observer : `~astroplan.Observer` the observaton location from which to apply the constraints targets : sequence of `~astroplan.Target` The targets on which to apply the constraints. Returns ------- constraint_result : 2D array of float or bool The constraints, with targets along the first index and times along the second. """ # Should be implemented on each subclass of Constraint raise NotImplementedError class AltitudeConstraint(Constraint): """ Constrain the altitude of the target. .. note:: This can misbehave if you try to constrain negative altitudes, as the `~astropy.coordinates.AltAz` frame tends to mishandle negative Parameters ---------- min : `~astropy.units.Quantity` or `None` Minimum altitude of the target (inclusive). `None` indicates no limit. max : `~astropy.units.Quantity` or `None` Maximum altitude of the target (inclusive). `None` indicates no limit. boolean_constraint : bool If True, the constraint is treated as a boolean (True for within the limits and False for outside). If False, the constraint returns a float on [0, 1], where 0 is the min altitude and 1 is the max. 
""" def __init__(self, min=None, max=None, boolean_constraint=True): if min is None: self.min = -90*u.deg else: self.min = min if max is None: self.max = 90*u.deg else: self.max = max self.boolean_constraint = boolean_constraint def compute_constraint(self, times, observer, targets): cached_altaz = _get_altaz(times, observer, targets) alt = cached_altaz['altaz'].alt if self.boolean_constraint: lowermask = self.min <= alt uppermask = alt <= self.max return lowermask & uppermask else: return max_best_rescale(alt, self.min, self.max) class AirmassConstraint(AltitudeConstraint): """ Constrain the airmass of a target. In the current implementation the airmass is approximated by the secant of the zenith angle. .. note:: The ``max`` and ``min`` arguments appear in the order (max, min) in this initializer to support the common case for users who care about the upper limit on the airmass (``max``) and not the lower limit. Parameters ---------- max : float or `None` Maximum airmass of the target. `None` indicates no limit. min : float or `None` Minimum airmass of the target. `None` indicates no limit. boolean_contstraint : bool Examples -------- To create a constraint that requires the airmass be "better than 2", i.e. at a higher altitude than airmass=2:: AirmassConstraint(2) """ def __init__(self, max=None, min=1, boolean_constraint=True): self.min = min self.max = max self.boolean_constraint = boolean_constraint def compute_constraint(self, times, observer, targets): cached_altaz = _get_altaz(times, observer, targets) secz = cached_altaz['altaz'].secz.value if self.boolean_constraint: if self.min is None and self.max is not None: mask = secz <= self.max elif self.max is None and self.min is not None: mask = self.min <= secz elif self.min is not None and self.max is not None: mask = (self.min <= secz) & (secz <= self.max) else: raise ValueError("No max and/or min specified in " "AirmassConstraint.") return mask else: if self.max is None: raise ValueError("Cannot have a float AirmassConstraint if max is None.") else: mx = self.max mi = 1 if self.min is None else self.min # values below 1 should be disregarded return min_best_rescale(secz, mi, mx, less_than_min=0) class AtNightConstraint(Constraint): """ Constrain the Sun to be below ``horizon``. """ @u.quantity_input(horizon=u.deg) def __init__(self, max_solar_altitude=0*u.deg, force_pressure_zero=True): """ Parameters ---------- max_solar_altitude : `~astropy.units.Quantity` The altitude of the sun below which it is considered to be "night" (inclusive). force_pressure_zero : bool (optional) Force the pressure to zero for solar altitude calculations. This avoids errors in the altitude of the Sun that can occur when the Sun is below the horizon and the corrections for atmospheric refraction return nonsense values. """ self.max_solar_altitude = max_solar_altitude self.force_pressure_zero = force_pressure_zero @classmethod def twilight_civil(cls, **kwargs): """ Consider nighttime as time between civil twilights (-6 degrees). """ return cls(max_solar_altitude=-6*u.deg, **kwargs) @classmethod def twilight_nautical(cls, **kwargs): """ Consider nighttime as time between nautical twilights (-12 degrees). """ return cls(max_solar_altitude=-12*u.deg, **kwargs) @classmethod def twilight_astronomical(cls, **kwargs): """ Consider nighttime as time between astronomical twilights (-18 degrees). 
""" return cls(max_solar_altitude=-18*u.deg, **kwargs) def _get_solar_altitudes(self, times, observer, targets): if not hasattr(observer, '_altaz_cache'): observer._altaz_cache = {} aakey = _make_cache_key(times, 'sun') if aakey not in observer._altaz_cache: try: if self.force_pressure_zero: observer_old_pressure = observer.pressure observer.pressure = 0 # find solar altitude at these times altaz = observer.altaz(times, get_sun(times)) altitude = altaz.alt # cache the altitude observer._altaz_cache[aakey] = dict(times=times, altitude=altitude) finally: if self.force_pressure_zero: observer.pressure = observer_old_pressure else: altitude = observer._altaz_cache[aakey]['altitude'] return altitude def compute_constraint(self, times, observer, targets): solar_altitude = self._get_solar_altitudes(times, observer, targets) mask = solar_altitude <= self.max_solar_altitude return mask class GalacticLatitudeConstraint(Constraint): """ Constrain the distance between the Galactic plane and some targets. """ def __init__(self, min=None, max=None): """ Parameters ---------- min : `~astropy.units.Quantity` or `None` (optional) Minimum acceptable Galactic latitude of target (inclusive). `None` indicates no limit. max : `~astropy.units.Quantity` or `None` (optional) Minimum acceptable Galactic latitude of target (inclusive). `None` indicates no limit. """ self.min = min self.max = max def compute_constraint(self, times, observer, targets): separation = abs(targets.transform_to(Galactic).b) if self.min is None and self.max is not None: mask = self.max >= separation elif self.max is None and self.min is not None: mask = self.min <= separation elif self.min is not None and self.max is not None: mask = ((self.min <= separation) & (separation <= self.max)) else: raise ValueError("No max and/or min specified in " "GalacticLatitudeConstraint.") return mask class SunSeparationConstraint(Constraint): """ Constrain the distance between the Sun and some targets. """ def __init__(self, min=None, max=None): """ Parameters ---------- min : `~astropy.units.Quantity` or `None` (optional) Minimum acceptable separation between Sun and target (inclusive). `None` indicates no limit. max : `~astropy.units.Quantity` or `None` (optional) Maximum acceptable separation between Sun and target (inclusive). `None` indicates no limit. """ self.min = min self.max = max def compute_constraint(self, times, observer, targets): # use get_body rather than get sun here, since # it returns the Sun's coordinates in an observer # centred frame, so the separation is as-seen # by the observer. # 'get_sun' returns ICRS coords. sun = get_body('sun', times, location=observer.location) solar_separation = sun.separation(targets) if self.min is None and self.max is not None: mask = self.max >= solar_separation elif self.max is None and self.min is not None: mask = self.min <= solar_separation elif self.min is not None and self.max is not None: mask = ((self.min <= solar_separation) & (solar_separation <= self.max)) else: raise ValueError("No max and/or min specified in " "SunSeparationConstraint.") return mask class MoonSeparationConstraint(Constraint): """ Constrain the distance between the Earth's moon and some targets. """ def __init__(self, min=None, max=None, ephemeris=None): """ Parameters ---------- min : `~astropy.units.Quantity` or `None` (optional) Minimum acceptable separation between moon and target (inclusive). `None` indicates no limit. 
max : `~astropy.units.Quantity` or `None` (optional) Maximum acceptable separation between moon and target (inclusive). `None` indicates no limit. ephemeris : str, optional Ephemeris to use. If not given, use the one set with ``astropy.coordinates.solar_system_ephemeris.set`` (which is set to 'builtin' by default). """ self.min = min self.max = max self.ephemeris = ephemeris def compute_constraint(self, times, observer, targets): # removed the location argument here, which causes small <1 deg # innacuracies, but it is needed until astropy PR #5897 is released # which should be astropy 1.3.2 moon = get_moon(times, ephemeris=self.ephemeris) # note to future editors - the order matters here # moon.separation(targets) is NOT the same as targets.separation(moon) # the former calculates the separation in the frame of the moon coord # which is GCRS, and that is what we want. moon_separation = moon.separation(targets) if self.min is None and self.max is not None: mask = self.max >= moon_separation elif self.max is None and self.min is not None: mask = self.min <= moon_separation elif self.min is not None and self.max is not None: mask = ((self.min <= moon_separation) & (moon_separation <= self.max)) else: raise ValueError("No max and/or min specified in " "MoonSeparationConstraint.") return mask class MoonIlluminationConstraint(Constraint): """ Constrain the fractional illumination of the Earth's moon. Constraint is also satisfied if the Moon has set. """ def __init__(self, min=None, max=None, ephemeris=None): """ Parameters ---------- min : float or `None` (optional) Minimum acceptable fractional illumination (inclusive). `None` indicates no limit. max : float or `None` (optional) Maximum acceptable fractional illumination (inclusive). `None` indicates no limit. ephemeris : str, optional Ephemeris to use. If not given, use the one set with `~astropy.coordinates.solar_system_ephemeris` (which is set to 'builtin' by default). """ self.min = min self.max = max self.ephemeris = ephemeris @classmethod def dark(cls, min=None, max=0.25, **kwargs): """ initialize a `~astroplan.constraints.MoonIlluminationConstraint` with defaults of no minimum and a maximum of 0.25 Parameters ---------- min : float or `None` (optional) Minimum acceptable fractional illumination (inclusive). `None` indicates no limit. max : float or `None` (optional) Maximum acceptable fractional illumination (inclusive). `None` indicates no limit. """ return cls(min, max, **kwargs) @classmethod def grey(cls, min=0.25, max=0.65, **kwargs): """ initialize a `~astroplan.constraints.MoonIlluminationConstraint` with defaults of a minimum of 0.25 and a maximum of 0.65 Parameters ---------- min : float or `None` (optional) Minimum acceptable fractional illumination (inclusive). `None` indicates no limit. max : float or `None` (optional) Maximum acceptable fractional illumination (inclusive). `None` indicates no limit. """ return cls(min, max, **kwargs) @classmethod def bright(cls, min=0.65, max=None, **kwargs): """ initialize a `~astroplan.constraints.MoonIlluminationConstraint` with defaults of a minimum of 0.65 and no maximum Parameters ---------- min : float or `None` (optional) Minimum acceptable fractional illumination (inclusive). `None` indicates no limit. max : float or `None` (optional) Maximum acceptable fractional illumination (inclusive). `None` indicates no limit. """ return cls(min, max, **kwargs) def compute_constraint(self, times, observer, targets): # first is the moon up? 
cached_moon = _get_moon_data(times, observer) moon_alt = cached_moon['altaz'].alt moon_down_mask = moon_alt < 0 moon_up_mask = moon_alt >= 0 illumination = cached_moon['illum'] if self.min is None and self.max is not None: mask = (self.max >= illumination) | moon_down_mask elif self.max is None and self.min is not None: mask = (self.min <= illumination) & moon_up_mask elif self.min is not None and self.max is not None: mask = ((self.min <= illumination) & (illumination <= self.max)) & moon_up_mask else: raise ValueError("No max and/or min specified in " "MoonSeparationConstraint.") return mask class LocalTimeConstraint(Constraint): """ Constrain the observable hours. """ def __init__(self, min=None, max=None): """ Parameters ---------- min : `~datetime.time` Earliest local time (inclusive). `None` indicates no limit. max : `~datetime.time` Latest local time (inclusive). `None` indicates no limit. Examples -------- Constrain the observations to targets that are observable between 23:50 and 04:08 local time: >>> from astroplan import Observer >>> from astroplan.constraints import LocalTimeConstraint >>> import datetime as dt >>> subaru = Observer.at_site("Subaru", timezone="US/Hawaii") >>> # bound times between 23:50 and 04:08 local Hawaiian time >>> constraint = LocalTimeConstraint(min=dt.time(23,50), max=dt.time(4,8)) """ self.min = min self.max = max if self.min is None and self.max is None: raise ValueError("You must at least supply either a minimum or a maximum time.") if self.min is not None: if not isinstance(self.min, datetime.time): raise TypeError("Time limits must be specified as datetime.time objects.") if self.max is not None: if not isinstance(self.max, datetime.time): raise TypeError("Time limits must be specified as datetime.time objects.") def compute_constraint(self, times, observer, targets): timezone = None # get timezone from time objects, or from observer if self.min is not None: timezone = self.min.tzinfo elif self.max is not None: timezone = self.max.tzinfo if timezone is None: timezone = observer.timezone if self.min is not None: min_time = self.min else: min_time = self.min = datetime.time(0, 0, 0) if self.max is not None: max_time = self.max else: max_time = datetime.time(23, 59, 59) # If time limits occur on same day: if min_time < max_time: try: mask = np.array([min_time <= t.time() <= max_time for t in times.datetime]) except BaseException: # use np.bool so shape queries don't cause problems mask = np.bool_(min_time <= times.datetime.time() <= max_time) # If time boundaries straddle midnight: else: try: mask = np.array([(t.time() >= min_time) or (t.time() <= max_time) for t in times.datetime]) except BaseException: mask = np.bool_((times.datetime.time() >= min_time) or (times.datetime.time() <= max_time)) return mask class TimeConstraint(Constraint): """Constrain the observing time to be within certain time limits. An example use case for this class would be to associate an acceptable time range with a specific observing block. This can be useful if not all observing blocks are valid over the time limits used in calls to `is_observable` or `is_always_observable`. """ def __init__(self, min=None, max=None): """ Parameters ---------- min : `~astropy.time.Time` Earliest time (inclusive). `None` indicates no limit. max : `~astropy.time.Time` Latest time (inclusive). `None` indicates no limit. 
Examples -------- Constrain the observations to targets that are observable between 2016-03-28 and 2016-03-30: >>> from astroplan import Observer >>> from astropy.time import Time >>> subaru = Observer.at_site("Subaru") >>> t1 = Time("2016-03-28T12:00:00") >>> t2 = Time("2016-03-30T12:00:00") >>> constraint = TimeConstraint(t1,t2) """ self.min = min self.max = max if self.min is None and self.max is None: raise ValueError("You must at least supply either a minimum or a " "maximum time.") if self.min is not None: if not isinstance(self.min, Time): raise TypeError("Time limits must be specified as " "astropy.time.Time objects.") if self.max is not None: if not isinstance(self.max, Time): raise TypeError("Time limits must be specified as " "astropy.time.Time objects.") def compute_constraint(self, times, observer, targets): with warnings.catch_warnings(): warnings.simplefilter('ignore') min_time = Time("1950-01-01T00:00:00") if self.min is None else self.min max_time = Time("2120-01-01T00:00:00") if self.max is None else self.max mask = np.logical_and(times > min_time, times < max_time) return mask class PrimaryEclipseConstraint(Constraint): """ Constrain observations to times during primary eclipse. """ def __init__(self, eclipsing_system): """ Parameters ---------- eclipsing_system : `~astroplan.periodic.EclipsingSystem` System which must be in primary eclipse. """ self.eclipsing_system = eclipsing_system def compute_constraint(self, times, observer=None, targets=None): mask = self.eclipsing_system.in_primary_eclipse(times) return mask class SecondaryEclipseConstraint(Constraint): """ Constrain observations to times during secondary eclipse. """ def __init__(self, eclipsing_system): """ Parameters ---------- eclipsing_system : `~astroplan.periodic.EclipsingSystem` System which must be in secondary eclipse. """ self.eclipsing_system = eclipsing_system def compute_constraint(self, times, observer=None, targets=None): mask = self.eclipsing_system.in_secondary_eclipse(times) return mask class PhaseConstraint(Constraint): """ Constrain observations to times in some range of phases for a periodic event (e.g.~transiting exoplanets, eclipsing binaries). """ def __init__(self, periodic_event, min=None, max=None): """ Parameters ---------- periodic_event : `~astroplan.periodic.PeriodicEvent` or subclass System on which to compute the phase. For example, the system could be an eclipsing or non-eclipsing binary, or exoplanet system. min : float (optional) Minimum phase (inclusive) on interval [0, 1). Default is zero. max : float (optional) Maximum phase (inclusive) on interval [0, 1). Default is one. Examples -------- To constrain observations on orbital phases between 0.4 and 0.6, >>> from astroplan import PeriodicEvent >>> from astropy.time import Time >>> import astropy.units as u >>> binary = PeriodicEvent(epoch=Time('2017-01-01 02:00'), period=1*u.day) >>> constraint = PhaseConstraint(binary, min=0.4, max=0.6) The minimum and maximum phase must be described on the interval [0, 1). 
To constrain observations on orbital phases between 0.6 and 1.2, for example, you should subtract one from the second number: >>> constraint = PhaseConstraint(binary, min=0.6, max=0.2) """ self.periodic_event = periodic_event if (min < 0) or (min > 1) or (max < 0) or (max > 1): raise ValueError('The minimum of the PhaseConstraint must be within' ' the interval [0, 1).') self.min = min if min is not None else 0.0 self.max = max if max is not None else 1.0 def compute_constraint(self, times, observer=None, targets=None): phase = self.periodic_event.phase(times) mask = np.where(self.max > self.min, (phase >= self.min) & (phase <= self.max), (phase >= self.min) | (phase <= self.max)) return mask def is_always_observable(constraints, observer, targets, times=None, time_range=None, time_grid_resolution=0.5*u.hour): """ A function to determine whether ``targets`` are always observable throughout ``time_range`` given constraints in the ``constraints_list`` for a particular ``observer``. Parameters ---------- constraints : list or `~astroplan.constraints.Constraint` Observational constraint(s) observer : `~astroplan.Observer` The observer who has constraints ``constraints`` targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`} Target or list of targets times : `~astropy.time.Time` (optional) Array of times on which to test the constraint time_range : `~astropy.time.Time` (optional) Lower and upper bounds on time sequence, with spacing ``time_resolution``. This will be passed as the first argument into `~astroplan.time_grid_from_range`. time_grid_resolution : `~astropy.units.Quantity` (optional) If ``time_range`` is specified, determine whether constraints are met between test times in ``time_range`` by checking constraint at linearly-spaced times separated by ``time_resolution``. Default is 0.5 hours. Returns ------- ever_observable : list List of booleans of same length as ``targets`` for whether or not each target is observable in the time range given the constraints. """ if not hasattr(constraints, '__len__'): constraints = [constraints] applied_constraints = [constraint(observer, targets, times=times, time_range=time_range, time_grid_resolution=time_grid_resolution, grid_times_targets=True) for constraint in constraints] constraint_arr = np.logical_and.reduce(applied_constraints) return np.all(constraint_arr, axis=1) def is_observable(constraints, observer, targets, times=None, time_range=None, time_grid_resolution=0.5*u.hour): """ Determines if the ``targets`` are observable during ``time_range`` given constraints in ``constraints_list`` for a particular ``observer``. Parameters ---------- constraints : list or `~astroplan.constraints.Constraint` Observational constraint(s) observer : `~astroplan.Observer` The observer who has constraints ``constraints`` targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`} Target or list of targets times : `~astropy.time.Time` (optional) Array of times on which to test the constraint time_range : `~astropy.time.Time` (optional) Lower and upper bounds on time sequence, with spacing ``time_resolution``. This will be passed as the first argument into `~astroplan.time_grid_from_range`. time_grid_resolution : `~astropy.units.Quantity` (optional) If ``time_range`` is specified, determine whether constraints are met between test times in ``time_range`` by checking constraint at linearly-spaced times separated by ``time_resolution``. Default is 0.5 hours. 
Returns ------- ever_observable : list List of booleans of same length as ``targets`` for whether or not each target is ever observable in the time range given the constraints. """ if not hasattr(constraints, '__len__'): constraints = [constraints] applied_constraints = [constraint(observer, targets, times=times, time_range=time_range, time_grid_resolution=time_grid_resolution, grid_times_targets=True) for constraint in constraints] constraint_arr = np.logical_and.reduce(applied_constraints) return np.any(constraint_arr, axis=1) def is_event_observable(constraints, observer, target, times=None, times_ingress_egress=None): """ Determines if the ``target`` is observable at each time in ``times``, given constraints in ``constraints`` for a particular ``observer``. Parameters ---------- constraints : list or `~astroplan.constraints.Constraint` Observational constraint(s) observer : `~astroplan.Observer` The observer who has constraints ``constraints`` target : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`} Target times : `~astropy.time.Time` (optional) Array of mid-event times on which to test the constraints times_ingress_egress : `~astropy.time.Time` (optional) Array of ingress and egress times for ``N`` events, with shape (``N``, 2). Returns ------- event_observable : `~numpy.ndarray` Array of booleans of same length as ``times`` for whether or not the target is ever observable at each time, given the constraints. """ if not hasattr(constraints, '__len__'): constraints = [constraints] if times is not None: applied_constraints = [constraint(observer, target, times=times, grid_times_targets=True) for constraint in constraints] constraint_arr = np.logical_and.reduce(applied_constraints) else: times_ing = times_ingress_egress[:, 0] times_egr = times_ingress_egress[:, 1] applied_constraints_ing = [constraint(observer, target, times=times_ing, grid_times_targets=True) for constraint in constraints] applied_constraints_egr = [constraint(observer, target, times=times_egr, grid_times_targets=True) for constraint in constraints] constraint_arr = np.logical_and(np.logical_and.reduce(applied_constraints_ing), np.logical_and.reduce(applied_constraints_egr)) return constraint_arr def months_observable(constraints, observer, targets, time_range=_current_year_time_range, time_grid_resolution=0.5*u.hour): """ Determines which month the specified ``targets`` are observable for a specific ``observer``, given the supplied ``constraints``. Parameters ---------- constraints : list or `~astroplan.constraints.Constraint` Observational constraint(s) observer : `~astroplan.Observer` The observer who has constraints ``constraints`` targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`} Target or list of targets time_range : `~astropy.time.Time` (optional) Lower and upper bounds on time sequence If ``time_range`` is not specified, defaults to current year (localtime) time_grid_resolution : `~astropy.units.Quantity` (optional) If ``time_range`` is specified, determine whether constraints are met between test times in ``time_range`` by checking constraint at linearly-spaced times separated by ``time_resolution``. Default is 0.5 hours. Returns ------- observable_months : list List of sets of unique integers representing each month that a target is observable, one set per target. These integers are 1-based so that January maps to 1, February maps to 2, etc. """ # TODO: This method could be sped up a lot by dropping to the trigonometric # altitude calculations. 
if not hasattr(constraints, '__len__'): constraints = [constraints] times = time_grid_from_range(time_range, time_grid_resolution) # TODO: This method could be sped up a lot by dropping to the trigonometric # altitude calculations. applied_constraints = [constraint(observer, targets, times=times, grid_times_targets=True) for constraint in constraints] constraint_arr = np.logical_and.reduce(applied_constraints) months_observable = [] for target, observable in zip(targets, constraint_arr): s = set([t.datetime.month for t in times[observable]]) months_observable.append(s) return months_observable def observability_table(constraints, observer, targets, times=None, time_range=None, time_grid_resolution=0.5*u.hour): """ Creates a table with information about observability for all the ``targets`` over the requested ``time_range``, given the constraints in ``constraints_list`` for ``observer``. Parameters ---------- constraints : list or `~astroplan.constraints.Constraint` Observational constraint(s) observer : `~astroplan.Observer` The observer who has constraints ``constraints`` targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`} Target or list of targets times : `~astropy.time.Time` (optional) Array of times on which to test the constraint time_range : `~astropy.time.Time` (optional) Lower and upper bounds on time sequence, with spacing ``time_resolution``. This will be passed as the first argument into `~astroplan.time_grid_from_range`. If a single (scalar) time, the table will be for a 24 hour period centered on that time. time_grid_resolution : `~astropy.units.Quantity` (optional) If ``time_range`` is specified, determine whether constraints are met between test times in ``time_range`` by checking constraint at linearly-spaced times separated by ``time_resolution``. Default is 0.5 hours. Returns ------- observability_table : `~astropy.table.Table` A Table containing the observability information for each of the ``targets``. The table contains four columns with information about the target and it's observability: ``'target name'``, ``'ever observable'``, ``'always observable'``, and ``'fraction of time observable'``. The column ``'time observable'`` will also be present if the ``time_range`` is given as a scalar. It also contains metadata entries ``'times'`` (with an array of all the times), ``'observer'`` (the `~astroplan.Observer` object), and ``'constraints'`` (containing the supplied ``constraints``). 
""" if not hasattr(constraints, '__len__'): constraints = [constraints] is_24hr_table = False if hasattr(time_range, 'isscalar') and time_range.isscalar: time_range = (time_range-12*u.hour, time_range+12*u.hour) is_24hr_table = True applied_constraints = [constraint(observer, targets, times=times, time_range=time_range, time_grid_resolution=time_grid_resolution, grid_times_targets=True) for constraint in constraints] constraint_arr = np.logical_and.reduce(applied_constraints) colnames = ['target name', 'ever observable', 'always observable', 'fraction of time observable'] target_names = [target.name for target in targets] ever_obs = np.any(constraint_arr, axis=1) always_obs = np.all(constraint_arr, axis=1) frac_obs = np.sum(constraint_arr, axis=1) / constraint_arr.shape[1] tab = table.Table(names=colnames, data=[target_names, ever_obs, always_obs, frac_obs]) if times is None and time_range is not None: times = time_grid_from_range(time_range, time_resolution=time_grid_resolution) if is_24hr_table: tab['time observable'] = tab['fraction of time observable'] * 24*u.hour tab.meta['times'] = times.datetime tab.meta['observer'] = observer tab.meta['constraints'] = constraints return tab def min_best_rescale(vals, min_val, max_val, less_than_min=1): """ rescales an input array ``vals`` to be a score (between zero and one), where the ``min_val`` goes to one, and the ``max_val`` goes to zero. Parameters ---------- vals : array-like the values that need to be rescaled to be between 0 and 1 min_val : float worst acceptable value (rescales to 0) max_val : float best value cared about (rescales to 1) less_than_min : 0 or 1 what is returned for ``vals`` below ``min_val``. (in some cases anything less than ``min_val`` should also return one, in some cases it should return zero) Returns ------- array of floats between 0 and 1 inclusive rescaled so that ``vals`` equal to ``max_val`` equal 0 and those equal to ``min_val`` equal 1 Examples -------- rescale airmasses to between 0 and 1, with the best (1) and worst (2.25). All values outside the range should return 0. >>> from astroplan.constraints import min_best_rescale >>> import numpy as np >>> airmasses = np.array([1, 1.5, 2, 3, 0]) >>> min_best_rescale(airmasses, 1, 2.25, less_than_min = 0) # doctest: +FLOAT_CMP array([ 1. , 0.6, 0.2, 0. , 0. ]) """ rescaled = (vals - max_val) / (min_val - max_val) below = vals < min_val above = vals > max_val rescaled[below] = less_than_min rescaled[above] = 0 return rescaled def max_best_rescale(vals, min_val, max_val, greater_than_max=1): """ rescales an input array ``vals`` to be a score (between zero and one), where the ``max_val`` goes to one, and the ``min_val`` goes to zero. Parameters ---------- vals : array-like the values that need to be rescaled to be between 0 and 1 min_val : float worst acceptable value (rescales to 0) max_val : float best value cared about (rescales to 1) greater_than_max : 0 or 1 what is returned for ``vals`` above ``max_val``. (in some cases anything higher than ``max_val`` should also return one, in some cases it should return zero) Returns ------- array of floats between 0 and 1 inclusive rescaled so that ``vals`` equal to ``min_val`` equal 0 and those equal to ``max_val`` equal 1 Examples -------- rescale an array of altitudes to be between 0 and 1, with the best (60) going to 1 and worst (35) going to 0. For values outside the range, the rescale should return 0 below 35 and 1 above 60. 
>>> from astroplan.constraints import max_best_rescale >>> import numpy as np >>> altitudes = np.array([20, 30, 40, 45, 55, 70]) >>> max_best_rescale(altitudes, 35, 60) # doctest: +FLOAT_CMP array([ 0. , 0. , 0.2, 0.4, 0.8, 1. ]) """ rescaled = (vals - min_val) / (max_val - min_val) below = vals < min_val above = vals > max_val rescaled[below] = 0 rescaled[above] = greater_than_max return rescaled
2.296875
2
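A minimal usage sketch for the observability_table helper shown in the entry above, assuming astroplan and astropy are installed; the site name, target names, constraint values, and dates are illustrative only (FixedTarget.from_name also needs network access for the name lookup).

import astropy.units as u
from astropy.time import Time
from astroplan import Observer, FixedTarget
from astroplan.constraints import AltitudeConstraint, AirmassConstraint, observability_table

observer = Observer.at_site("Subaru")                          # named-site lookup (illustrative)
targets = [FixedTarget.from_name("Vega"), FixedTarget.from_name("Polaris")]
constraints = [AltitudeConstraint(min=30 * u.deg), AirmassConstraint(max=2.25)]
time_range = Time(["2024-01-01 00:00", "2024-01-02 00:00"])

tab = observability_table(constraints, observer, targets, time_range=time_range)
print(tab)  # columns: target name, ever observable, always observable, fraction of time observable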
lbry/wallet/server/peer.py
snapperVibes/lbry-sdk
2
2504
# Copyright (c) 2017, <NAME> # # All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """Representation of a peer server.""" from ipaddress import ip_address from lbry.wallet.server import util from lbry.wallet.server.util import cachedproperty from typing import Dict class Peer: # Protocol version ATTRS = ('host', 'features', # metadata 'source', 'ip_addr', 'last_good', 'last_try', 'try_count') FEATURES = ('pruning', 'server_version', 'protocol_min', 'protocol_max', 'ssl_port', 'tcp_port') # This should be set by the application DEFAULT_PORTS: Dict[str, int] = {} def __init__(self, host, features, source='unknown', ip_addr=None, last_good=0, last_try=0, try_count=0): """Create a peer given a host name (or IP address as a string), a dictionary of features, and a record of the source.""" assert isinstance(host, str) assert isinstance(features, dict) assert host in features.get('hosts', {}) self.host = host self.features = features.copy() # Canonicalize / clean-up for feature in self.FEATURES: self.features[feature] = getattr(self, feature) # Metadata self.source = source self.ip_addr = ip_addr # last_good represents the last connection that was # successful *and* successfully verified, at which point # try_count is set to 0. Failure to connect or failure to # verify increment the try_count. self.last_good = last_good self.last_try = last_try self.try_count = try_count # Transient, non-persisted metadata self.bad = False self.other_port_pairs = set() self.status = 2 @classmethod def peers_from_features(cls, features, source): peers = [] if isinstance(features, dict): hosts = features.get('hosts') if isinstance(hosts, dict): peers = [Peer(host, features, source=source) for host in hosts if isinstance(host, str)] return peers @classmethod def deserialize(cls, item): """Deserialize from a dictionary.""" return cls(**item) def matches(self, peers): """Return peers whose host matches our hostname or IP address. Additionally include all peers whose IP address matches our hostname if that is an IP address. 
""" candidates = (self.host.lower(), self.ip_addr) return [peer for peer in peers if peer.host.lower() in candidates or peer.ip_addr == self.host] def __str__(self): return self.host def update_features(self, features): """Update features in-place.""" try: tmp = Peer(self.host, features) except Exception: pass else: self.update_features_from_peer(tmp) def update_features_from_peer(self, peer): if peer != self: self.features = peer.features for feature in self.FEATURES: setattr(self, feature, getattr(peer, feature)) def connection_port_pairs(self): """Return a list of (kind, port) pairs to try when making a connection.""" # Use a list not a set - it's important to try the registered # ports first. pairs = [('SSL', self.ssl_port), ('TCP', self.tcp_port)] while self.other_port_pairs: pairs.append(self.other_port_pairs.pop()) return [pair for pair in pairs if pair[1]] def mark_bad(self): """Mark as bad to avoid reconnects but also to remember for a while.""" self.bad = True def check_ports(self, other): """Remember differing ports in case server operator changed them or removed one.""" if other.ssl_port != self.ssl_port: self.other_port_pairs.add(('SSL', other.ssl_port)) if other.tcp_port != self.tcp_port: self.other_port_pairs.add(('TCP', other.tcp_port)) return bool(self.other_port_pairs) @cachedproperty def is_tor(self): return self.host.endswith('.onion') @cachedproperty def is_valid(self): ip = self.ip_address if ip: return ((ip.is_global or ip.is_private) and not (ip.is_multicast or ip.is_unspecified)) return util.is_valid_hostname(self.host) @cachedproperty def is_public(self): ip = self.ip_address if ip: return self.is_valid and not ip.is_private else: return self.is_valid and self.host != 'localhost' @cachedproperty def ip_address(self): """The host as a python ip_address object, or None.""" try: return ip_address(self.host) except ValueError: return None def bucket(self): if self.is_tor: return 'onion' if not self.ip_addr: return '' return tuple(self.ip_addr.split('.')[:2]) def serialize(self): """Serialize to a dictionary.""" return {attr: getattr(self, attr) for attr in self.ATTRS} def _port(self, key): hosts = self.features.get('hosts') if isinstance(hosts, dict): host = hosts.get(self.host) port = self._integer(key, host) if port and 0 < port < 65536: return port return None def _integer(self, key, d=None): d = d or self.features result = d.get(key) if isinstance(d, dict) else None if isinstance(result, str): try: result = int(result) except ValueError: pass return result if isinstance(result, int) else None def _string(self, key): result = self.features.get(key) return result if isinstance(result, str) else None @cachedproperty def genesis_hash(self): """Returns None if no SSL port, otherwise the port as an integer.""" return self._string('genesis_hash') @cachedproperty def ssl_port(self): """Returns None if no SSL port, otherwise the port as an integer.""" return self._port('ssl_port') @cachedproperty def tcp_port(self): """Returns None if no TCP port, otherwise the port as an integer.""" return self._port('tcp_port') @cachedproperty def server_version(self): """Returns the server version as a string if known, otherwise None.""" return self._string('server_version') @cachedproperty def pruning(self): """Returns the pruning level as an integer. 
None indicates no pruning.""" pruning = self._integer('pruning') if pruning and pruning > 0: return pruning return None def _protocol_version_string(self, key): version_str = self.features.get(key) ptuple = util.protocol_tuple(version_str) return util.version_string(ptuple) @cachedproperty def protocol_min(self): """Minimum protocol version as a string, e.g., 1.0""" return self._protocol_version_string('protocol_min') @cachedproperty def protocol_max(self): """Maximum protocol version as a string, e.g., 1.1""" return self._protocol_version_string('protocol_max') def to_tuple(self): """The tuple ((ip, host, details) expected in response to a peers subscription.""" details = self.real_name().split()[1:] return (self.ip_addr or self.host, self.host, details) def real_name(self): """Real name of this peer as used on IRC.""" def port_text(letter, port): if port == self.DEFAULT_PORTS.get(letter): return letter else: return letter + str(port) parts = [self.host, 'v' + self.protocol_max] if self.pruning: parts.append(f'p{self.pruning:d}') for letter, port in (('s', self.ssl_port), ('t', self.tcp_port)): if port: parts.append(port_text(letter, port)) return ' '.join(parts) @classmethod def from_real_name(cls, real_name, source): """Real name is a real name as on IRC, such as "erbium1.sytes.net v1.0 s t" Returns an instance of this Peer class. """ host = 'nohost' features = {} ports = {} for n, part in enumerate(real_name.split()): if n == 0: host = part continue if part[0] in ('s', 't'): if len(part) == 1: port = cls.DEFAULT_PORTS[part[0]] else: port = part[1:] if part[0] == 's': ports['ssl_port'] = port else: ports['tcp_port'] = port elif part[0] == 'v': features['protocol_max'] = features['protocol_min'] = part[1:] elif part[0] == 'p': features['pruning'] = part[1:] features.update(ports) features['hosts'] = {host: ports} return cls(host, features, source)
1.554688
2
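A short round-trip sketch for the Peer class in the entry above, assuming the lbry-sdk package is importable at this path; the DEFAULT_PORTS values are illustrative, since the class explicitly leaves them to the application.

from lbry.wallet.server.peer import Peer

# The application is expected to set this before parsing IRC-style real names.
Peer.DEFAULT_PORTS = {'s': 50002, 't': 50001}   # hypothetical defaults

peer = Peer.from_real_name("erbium1.sytes.net v1.0 s t", source="irc")
print(peer.host)                     # erbium1.sytes.net
print(peer.connection_port_pairs())  # [('SSL', 50002), ('TCP', 50001)]
print(peer.serialize())              # dict of host, features and try/last-good metadata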
shop/migrations/0009_auto_20200310_1430.py
manson800819/test
0
2512
# -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-03-10 14:30 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('shop', '0008_auto_20200310_1134'), ] operations = [ migrations.RemoveField( model_name='category', name='id', ), migrations.AlterField( model_name='category', name='name', field=models.CharField(db_index=True, max_length=200, primary_key=True, serialize=False), ), migrations.AlterField( model_name='product', name='type1', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='shop.Type1'), ), ]
1.148438
1
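A hypothetical shop/models.py state that would produce the migration above: Category drops its implicit id and uses name as the primary key, and Product keeps a cascading ForeignKey to Type1 with related_name='products'. Any field option not visible in the migration itself is a guess (serialize=False in the migration is added automatically for primary keys).

from django.db import models

class Type1(models.Model):
    name = models.CharField(max_length=200)

class Category(models.Model):
    name = models.CharField(max_length=200, primary_key=True, db_index=True)

class Product(models.Model):
    type1 = models.ForeignKey(Type1, on_delete=models.CASCADE, related_name='products')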
workoutizer/__main__.py
pa3kDaWae/workoutizer
0
2520
import os import argparse import subprocess import socket import sys import click from django.core.management import execute_from_command_line from workoutizer.settings import WORKOUTIZER_DIR, WORKOUTIZER_DB_PATH, TRACKS_DIR from workoutizer import __version__ BASE_DIR = os.path.dirname(os.path.dirname(__file__)) SETUP_DIR = os.path.join(BASE_DIR, 'setup') os.environ["DJANGO_SETTINGS_MODULE"] = "workoutizer.settings" example_rpi_cmd = "wkz --setup_rpi vendor_id=091e product_id=4b48" url_help = 'specify ip address and port pair, like: address:port' @click.group() def cli(): pass @click.command(help='Mandatory command to initialize workoutizer. This fetches the static files, creates the database ' 'and applies the required migrations.') def init(): _build_home() execute_from_command_line(["manage.py", "collectstatic", "--noinput"]) execute_from_command_line(["manage.py", "migrate"]) execute_from_command_line(["manage.py", "check"]) click.echo(f"database and track files are stored in: {WORKOUTIZER_DIR}") @click.option('--ip', default="", help=url_help) @click.option('--product_id', help="product ip of your device", required=True) @click.option('--vendor_id', help="vendor ip of your device", required=True) @click.command(help='Configure Raspberry Pi to auto mount devices. Passing vendor and product id is required. Passing ' f'the local ip address and port is optionally. E.g.: {example_rpi_cmd}') def setup_rpi(ip, vendor_id, product_id): if not ip: ip = _get_local_ip_address() answer = input(f"Are you sure you want to setup your Raspberry Pi?\n\n" f"This will copy the required udev rule and systemd service file\n" f"to your system to enable automated mounting of your device.\n" f"This might take a while...\n\n" f"Start setup? [Y/n] ") if answer.lower() == 'y': click.echo(f"installing ansible...") _pip_install('ansible==2.9.10') click.echo(f"starting setup using ansible...") _setup_rpi( vendor_id=vendor_id, product_id=product_id, ip_port=f"{ip}:8000" ) _run_ansible(playbook='install_packages.yml') click.echo(f"Successfully configured to automatically mount your device when plugged in. Note: These changes " f"require a system restart to take effect.") else: click.echo(f"Aborted.") @click.argument('url', default="") @click.command(help="Run workoutizer. Passing the local ip address and port is optionally. In case of no ip address " "being passed, it will be determined automatically. Usage, e.g.: 'wkz run 0.0.0.0:8000'.") def run(url): if not url: url = f"{_get_local_ip_address()}:8000" execute_from_command_line(["manage.py", "runserver", url]) @click.argument('url', default="") @click.command(help='Configure workoutizer to run as systemd service. Passing the local ip address and port is ' 'optionally. In case of no ip address being passed, it will be determined automatically.') def wkz_as_service(url): _pip_install('ansible==2.9.10') _wkz_as_service(url=url) @click.argument('cmd', nargs=1) @click.command(help="Pass commands to django's manage.py. Convenience function to access all django commands which are " "not yet covered with the given set of workoutizer commands. 
Usage, e.g.: " "wkz manage 'runserver 0.0.0.0:8000 --noreload'.") def manage(cmd): execute_from_command_line(["manage.py"] + cmd.split(' ')) @click.command(help='Show the version of currently installed workoutizer.') def version(): click.echo(__version__) @click.command(help='Check for a newer version and install if there is any.') def upgrade(): _upgrade() cli.add_command(upgrade) cli.add_command(version) cli.add_command(init) cli.add_command(setup_rpi) cli.add_command(run) cli.add_command(manage) cli.add_command(wkz_as_service) def _upgrade(): latest_version = _get_latest_version_of("workoutizer") from workoutizer import __version__ as current_version if latest_version: click.echo(f"found newer version: {latest_version}, you have {current_version} installed") _pip_install('workoutizer', upgrade=True) execute_from_command_line(["manage.py", "collectstatic", "--noinput"]) execute_from_command_line(["manage.py", "migrate"]) execute_from_command_line(["manage.py", "check"]) click.echo(f"Successfully upgraded from {current_version} to {latest_version}") else: click.echo(f"No update available. You are running the latest version: {current_version}") def _get_latest_version_of(package: str): outdated = str( subprocess.check_output([sys.executable, "-m", "pip", "list", '--outdated', '--disable-pip-version-check'])) if package in outdated: output = str(subprocess.check_output([sys.executable, "-m", "pip", "search", package])) latest_version = output[output.find('LATEST'):].split('\\n')[0].split(' ')[-1] return latest_version else: return False def _setup_rpi(vendor_id: str, product_id: str, ip_port: str = None): if not ip_port: ip_port = f"{_get_local_ip_address()}:8000" result = _run_ansible( playbook='setup_on_rpi.yml', variables={ 'vendor_id': vendor_id, 'product_id': product_id, 'address_plus_port': ip_port, } ) if result == 0: pass else: click.echo(f"ERROR: Could not configure Raspberry Pi, see above errors.") quit() return result def _wkz_as_service(url: str): click.echo(f"configuring workoutizer to run as system service") if not url: url = f"{_get_local_ip_address()}:8000" env_binaries = sys.executable wkz_executable = env_binaries[:env_binaries.find('python')] + "wkz" result = _run_ansible( playbook='wkz_as_service.yml', variables={ 'address_plus_port': url, 'wkz_executable': wkz_executable, } ) if result == 0: click.echo(f"Successfully configured workoutizer as systemd service. Run it with: systemctl start wkz.service") else: click.echo(f"ERROR: Could not configure workoutizer as systemd service, see above errors.") return result def _get_local_ip_address(): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(("8.8.8.8", 80)) ip_address = s.getsockname()[0] s.close() return ip_address def _build_home(): if os.path.isdir(WORKOUTIZER_DIR): if os.path.isfile(WORKOUTIZER_DB_PATH): click.echo(f"Found existing workoutizer database at: {WORKOUTIZER_DB_PATH}\n") answer = input(f"Workoutizer could try to use the existing database instead of creating a new one.\n" f"Note that this could lead to faulty behaviour because of mismatching applied\n" f"migrations on this database.\n\n" f"Do you want to use the existing database instead of creating a new one? 
[Y/n] ") if answer.lower() == 'y': click.echo(f"keeping existing database at {WORKOUTIZER_DB_PATH}") return else: click.echo(f"removed database at {WORKOUTIZER_DB_PATH}") os.remove(WORKOUTIZER_DB_PATH) _make_tracks_dir(TRACKS_DIR) else: os.mkdir(WORKOUTIZER_DIR) _make_tracks_dir(TRACKS_DIR) def _make_tracks_dir(path): if not os.path.isdir(path): os.mkdir(path) class ParseDict(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): d = {} if values: for item in values: split_items = item.split("=", 1) key = split_items[0].strip() # we remove blanks around keys, as is logical value = split_items[1] d[key] = value setattr(namespace, self.dest, d) def _pip_install(package, upgrade: bool = False): if upgrade: subprocess.check_call([sys.executable, "-m", "pip", "install", package, '--upgrade']) else: subprocess.check_call([sys.executable, "-m", "pip", "install", package]) def _run_ansible(playbook: str, variables: dict = None): if variables is None: variables = {} from ansible import context from ansible.cli import CLI from ansible.module_utils.common.collections import ImmutableDict from ansible.executor.playbook_executor import PlaybookExecutor from ansible.parsing.dataloader import DataLoader from ansible.inventory.manager import InventoryManager from ansible.vars.manager import VariableManager loader = DataLoader() context.CLIARGS = ImmutableDict( tags={}, listtags=False, listtasks=False, listhosts=False, syntax=False, connection='ssh', module_path=None, forks=100, remote_user='xxx', private_key_file=None, ssh_common_args=None, ssh_extra_args=None, sftp_extra_args=None, scp_extra_args=None, become=True, become_method='sudo', become_user='root', verbosity=True, check=False, start_at_task=None ) inventory = InventoryManager(loader=loader, sources=()) variable_manager = VariableManager(loader=loader, inventory=inventory, version_info=CLI.version_info(gitinfo=False)) variable_manager._extra_vars = variables pbex = PlaybookExecutor(playbooks=[os.path.join(SETUP_DIR, 'ansible', playbook)], inventory=inventory, variable_manager=variable_manager, loader=loader, passwords={}) return pbex.run() if __name__ == '__main__': cli()
1.476563
1
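A small sketch of exercising the click command group defined in the entry above without a shell, assuming the workoutizer package is installed so its __main__ module is importable; CliRunner is part of click itself.

from click.testing import CliRunner
from workoutizer.__main__ import cli

runner = CliRunner()
result = runner.invoke(cli, ["version"])  # runs the 'version' command defined above
print(result.exit_code, result.output)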
AI_Engine_Development/Feature_Tutorials/07-AI-Engine-Floating-Point/Utils/GenerationLib.py
jlamperez/Vitis-Tutorials
1
2536
# # Copyright 2020–2021 Xilinx, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import numpy as np from math import * import random def GenerateTestVector(dtval,pliow,NPhases_s,NStreams_s,NSamples_s,NFrames_s,SeqType_s,Basename_s): print('DtVal : ',dtval.get()) print('PLIO width : ',pliow.get()) print('NPhases : ',NPhases_s.get()) print('NStreams : ',NStreams_s.get()) print('NSamples : ',NSamples_s.get()) print('NFrames : ',NFrames_s.get()) print('Type of Sequence : ',SeqType_s.get()) print('Base filename : ',Basename_s.get()) NPhases = int(NPhases_s.get()) NStreams = int(NStreams_s.get()) LFrame = int(NSamples_s.get()) NFrames = int(NFrames_s.get()) SequenceType = SeqType_s.get() Basename = Basename_s.get() #parameters that should be in the GUI # SequenceType ='Linear' # 'SinCos' 'Linear' 'Random' 'Dirac' # Basename = 'PhaseIn' NSamples = NPhases*NStreams*LFrame*NFrames; NSamples1 = NPhases*NStreams*LFrame*(NFrames+1); # A little longer to allow for delay in streams NBitsData = 32; if( dtval.get() == 'int16'): NBitsData = 16 HasImag = 0 if (dtval.get() == 'cint16'): HasImag = 1 if(SequenceType != 'SinCos' and SequenceType != 'Linear' and SequenceType != 'Random' and SequenceType != 'Dirac'): print ('Unknown Sequence Type') return # Create the overall signal that will be distributed over all streams # it is already separated in phases S = np.zeros((NPhases,int(NSamples1/NPhases),1+HasImag)) for i in range(int(NSamples1/NPhases)): for p in range (NPhases): k = i*NPhases+p if (SequenceType == 'SinCos'): vr = int(5000*cos(6.28*5/(NPhases*NStreams*LFrame)*k)) vi = int(5000*sin(6.28*5/(NPhases*NStreams*LFrame)*k)) elif (SequenceType == 'Linear'): vr = k vi = -k elif (SequenceType == 'Random'): vr = random.randint(-5000,5000) vi = random.randint(-5000,5000) elif (SequenceType == 'Dirac'): vr = 0 vi = 0 if(k%151 == 1): vr = 1 elif(k%151 == 40): vi = 1 elif(k%151 == 81): vr = 2 elif(k%151 == 115): vi = -2 # if(k%311 == 50): # vr = 1 # S[p,i,0] = # if(HasImag==1): # S[p,i,1] = int(5000*sin(6.28*5/(NPhases*NStreams*LFrame)*k)) S[p,i,0] = vr if (HasImag == 1 ): S[p,i,1] = vi PLIOwidth = int(pliow.get()) NSamplesPerLine = int(PLIOwidth/NBitsData) # Data are read in blocks of 128 bits (4 data in cint16) # Create an Input test Vector in TestInputS.txt FileNames = []; # Easiest case: 1 stream per AI Engine if (NStreams == 1): #Creates list of filenames for Phi in range(NPhases): FileNames.append(Basename+'_'+str(Phi)+'.txt') #Open all files fds = [open(path, 'w') for path in FileNames] #Fill all files with the right data for p in range(NPhases): fd = fds[p] for s in range(int(NSamples1/NPhases/NSamplesPerLine)): for d in range(NSamplesPerLine): index = s*NSamplesPerLine + d fd.write(str(int(S[p,index,0]))+' ') if(HasImag): fd.write(str(int(S[p,index,1]))+' ') fd.write('\n') for fd in fds: fd.close() if (NStreams == 2): #Creates list of filenames for Phi in range(NPhases): for Stream in range(NStreams): FileNames.append('PhaseIn_'+str(Phi)+'_'+str(Stream)+'.txt') # Hash table to associate data to streams 
NSamplesIn128bits = int(128/NBitsData ) H = np.zeros((int(NSamples1/NPhases/2),2)) H = H.astype('int32') index = np.zeros(2) index = index.astype('int32') for s in range(int(NSamples1/NPhases)): k = int(s/NSamplesIn128bits) # Block order i = k%2 # Which streams H[index[i],i] = s index[i] = index[i]+1 #Open all files fds = [open(path, 'w') for path in FileNames] #Fill all files with the right data for p in range(NPhases): for stream in range(2): fd = fds[2*p+stream] for s in range(int(NSamples1/NPhases/NSamplesPerLine/NStreams)): for d in range(NSamplesPerLine): index = s*NSamplesPerLine + d fd.write(str(int(S[p,H[index,stream],0]))+' ') if(HasImag): fd.write(str(int(S[p,H[index,stream],1]))+' ') fd.write('\n') for fd in fds: fd.close()
1.632813
2
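A minimal sketch of driving GenerateTestVector from the entry above outside its Tkinter GUI; the real callers pass Tk variables, so a tiny stand-in exposing .get() is used here, and the parameter values are arbitrary examples.

from GenerationLib import GenerateTestVector  # assumes the Utils directory is on the Python path

class Var:
    """Stand-in for a Tkinter StringVar: only .get() is needed."""
    def __init__(self, value):
        self._value = value
    def get(self):
        return self._value

# cint16 samples, 64-bit PLIO, 4 phases, 1 stream, 128 samples per frame, 2 frames
GenerateTestVector(Var('cint16'), Var('64'), Var('4'), Var('1'),
                   Var('128'), Var('2'), Var('Linear'), Var('PhaseIn'))
# writes PhaseIn_0.txt ... PhaseIn_3.txt, one file per phase (NStreams == 1)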
src/dependencies/contrib/celery.py
nicoddemus/dependencies
0
2560
""" dependencies.contrib.celery --------------------------- This module implements injectable Celery task. :copyright: (c) 2016-2020 by dry-python team. :license: BSD, see LICENSE for more details. """ from _dependencies.contrib.celery import shared_task from _dependencies.contrib.celery import task __all__ = ["shared_task", "task"]
1.023438
1
epiphancloud/models/settings.py
epiphan-video/epiphancloud_api
0
2648
class DeviceSettings: def __init__(self, settings): self._id = settings["id"] self._title = settings["title"] self._type = settings["type"]["name"] self._value = settings["value"] @property def id(self): return self._id @property def value(self): return self._value
1.367188
1
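A minimal construction sketch for the DeviceSettings wrapper above; the dictionary keys mirror exactly what __init__ reads, and the example values are made up.

from epiphancloud.models.settings import DeviceSettings

setting = DeviceSettings({
    "id": "recording_enabled",    # hypothetical setting id
    "title": "Recording",
    "type": {"name": "bool"},
    "value": True,
})
print(setting.id, setting.value)  # recording_enabled True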
DistributedRL/Gateway/build/Code/sim/Parser/LAI/GreenIndex.py
zhkmxx9302013/SoftwarePilot
4
2656
import argparse from PIL import Image, ImageStat import math parser = argparse.ArgumentParser() parser.add_argument('fname') parser.add_argument('pref', default="", nargs="?") args = parser.parse_args() im = Image.open(args.fname) RGB = im.convert('RGB') imWidth, imHeight = im.size ratg = 1.2 ratgb = 1.66 ming = 10 ratr = 2 speed = 8 leafcount = 0 total = 0 for i in range(0, int(imWidth/speed)): for j in range(0, int(imHeight/speed)): R,G,B = RGB.getpixel((i*speed,j*speed)) if R*ratg < G and B*ratgb < G and B*ratr < R: leafcount = leafcount + 1 total = total+1 print("LAI="+str(float(leafcount)/total))
2.03125
2
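A sketch of invoking the leaf-coverage script above: it takes an image path (plus an optional, currently unused prefix) on the command line and prints the fraction of sampled pixels classified as green. The image file name below is a placeholder.

import subprocess

result = subprocess.run(
    ["python", "GreenIndex.py", "field_photo.jpg"],  # placeholder image path
    capture_output=True, text=True,
)
print(result.stdout)  # e.g. LAI=0.4213...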
sandbox/test/testChainop.py
turkeydonkey/nzmath3
1
2664
import unittest import operator import sandbox.chainop as chainop class BasicChainTest (unittest.TestCase): def testBasicChain(self): double = lambda x: x * 2 self.assertEqual(62, chainop.basic_chain((operator.add, double), 2, 31)) square = lambda x: x ** 2 self.assertEqual(2**31, chainop.basic_chain((operator.mul, square), 2, 31)) class MultiChainTest (unittest.TestCase): def testMultiChain(self): double = lambda x: x * 2 self.assertEqual([62, 93], chainop.multi_chains((operator.add, double), (2, 3), 31)) square = lambda x: x ** 2 self.assertEqual([2**31, 3**31], chainop.multi_chains((operator.mul, square), [2, 3], 31)) def suite(suffix="Test"): suite = unittest.TestSuite() all_names = globals() for name in all_names: if name.endswith(suffix): suite.addTest(unittest.makeSuite(all_names[name], "test")) return suite if __name__ == '__main__': runner = unittest.TextTestRunner() runner.run(suite())
1.882813
2
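The tests above pin down the expected behaviour of sandbox.chainop.basic_chain; the following is an inferred plain-Python equivalent (a left-to-right binary chain, i.e. square-and-multiply generalised to an (operation, squaring) pair), not nzmath3's actual implementation, shown only to make the convention in the assertions concrete.

import operator

def basic_chain(ops, element, index):
    """Combine `element` with itself `index` times under the given (binary op, squaring op) pair."""
    binary, squaring = ops
    result = element
    for bit in bin(index)[3:]:      # binary digits of index, skipping the leading 1
        result = squaring(result)
        if bit == '1':
            result = binary(result, element)
    return result

assert basic_chain((operator.add, lambda x: x * 2), 2, 31) == 62        # 2 * 31
assert basic_chain((operator.mul, lambda x: x ** 2), 2, 31) == 2 ** 31  # 2 ** 31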
symbolicR/python/forward_kin.py
mharding01/augmented-neuromuscular-RT-running
0
2696
import numpy as np import sympy as sp import re import os ###################### # # # 17 16 21 # # 18 15 22 # # 19 14 23 # # 20 01 24 # # 02 08 # # 03 09 # # 04 10 # # 05 11 # # 06 12 # # 07 13 # # # ###################### # # origin: in the waist, middle point between the two pitch hip rotations # inertial frame: located at the origin (waist), but aligned with the ground (info from IMU) # # Di : position vector from the anchor point of the previous body to the current body i # (previous body is not always body i-1), expressed in the relative # frame of the previous body # DGi : position vector from the anchor point of body i to its COM (center of mass) G_i, # expressed in the relative frame of the current body i # Omi : rotational vector from the previous body to the current body i # (previous body is not always body i-1), expressed in the relative # frame of the previous body # Rdi : rotational matrix between body i and its predecessor # si : sine of the relative angle before body i # ci : cosine of the relative angle before body i # # xi : absolute position vector (from origin, expressed in the inertial frame) # of the anchor point of body i # xgi : absolute position vector of the COM G_i of body i # xpi : derivative of xi # xgpi : derivative of xgi # omi : absolute rotational vector of body i # Ri : absolute rotational matrix # Rti : transpose matrix of Ri # xji : jacobian of 'xi' # xgji : jacobian of 'xgi' # Rji : jacobian of 'Ri' # return true if it is a float def isInt(value): try: int(value) return True except: return False # return true if it has a shape 'R%a_%b%c' (indexes %a, %b, %c also returned) def isRot(value): try: a = int(value.split('_')[0].split('R')[1]) b = int(value.split('_')[1][0]) c = int(value.split('_')[1][1]) return True, a, b, c except: return False, -1, -1, -1 # return true if it has a shape 'x%a_%b' (indexes %a, %b also returned) def isVec(value): try: a = int(value.split('_')[0].split('x')[1]) b = int(value.split('_')[1]) return True, a, b except: return False, -1, -1 # count the number of 'elem' in the file def count_elem(in_file, elem): count = 0; with open(in_file, 'r') as f: # loop on all the lines for line in f: cut_line = line.split(elem) if len(cut_line) == 2: count += 1 return count # print the declaration of an element def print_declaration_elem(in_file, out_write, elem, nb_max_line): if count_elem(in_file, '{}'.format(elem)) >= 1: count = 0 with open(in_file,'r') as f: # loop on all the lines for line in f: cut_line_1 = line.split(elem) cut_line_2 = line.split(' = ') if len(cut_line_1) == 2 and len(cut_line_2) == 2: if len(cut_line_2[0].split('[')) == 1: if count == 0: out_write.write(' double {}'.format(cut_line_2[0].strip())) else: out_write.write(', {}'.format(cut_line_2[0].strip())) count += 1 if count >= nb_max_line: out_write.write(';\n') count = 0 if count != 0: out_write.write(';\n') # print all declarations def print_all_declaration(in_file, out_write, nb_max_char): count = 0 with open(in_file,'r') as f: # loop on all the lines for line in f: cut_line = line.split(' = ') if len(cut_line) == 2: if len(cut_line[0].split('[')) == 1: if count == 0: out_write.write(' double {}'.format(cut_line[0].strip())) else: out_write.write(', {}'.format(cut_line[0].strip())) count += len(cut_line[0].strip()) + 2 if count >= nb_max_char: out_write.write(';\n') count = 0 if count != 0: out_write.write(';\n') # get tilde matrix def get_tilde(v): return np.array([[0.0, -v[2], v[1]], [v[2], 0.0, -v[0]], [-v[1], v[0], 0.0]]) # get rotation matrix def 
get_rotation_matrix(axis, direct, cosine, sine): if direct: if axis == 1: return np.array([[1.0, 0.0, 0.0], [0.0, cosine, sine], [0.0, -sine, cosine]]) elif axis == 2: return np.array([[cosine, 0.0, -sine], [0.0, 1.0, 0.0], [sine, 0.0, cosine]]) elif axis == 3: return np.array([[cosine, sine, 0.0], [-sine, cosine, 0.0], [0.0, 0.0, 1.0]]) else: return np.array([]) else: if axis == 1: return np.array([[1.0, 0.0, 0.0], [0.0, cosine, -sine], [0.0, sine, cosine]]) elif axis == 2: return np.array([[cosine, 0.0, sine], [0.0, 1.0, 0.0], [-sine, 0.0, cosine]]) elif axis == 3: return np.array([[cosine, -sine, 0.0], [sine, cosine, 0.0], [0.0, 0.0, 1.0]]) else: return np.array([]) # get vector axis def get_vector_axis(axis, direct, elem): if direct: if axis == 1: return np.array([[elem], [0.0], [0.0]]) elif axis == 2: return np.array([[0.0], [elem], [0.0]]) elif axis == 3: return np.array([[0.0], [0.0], [elem]]) else: return np.array([]) else: if axis == 1: return np.array([[-elem], [0.0], [0.0]]) elif axis == 2: return np.array([[0.0], [-elem], [0.0]]) elif axis == 3: return np.array([[0.0], [0.0], [-elem]]) else: return np.array([]) # compute the derivative of an element (for jacobian) def der_elem(elem_str, Rj, xj, xgj, der_var): # element to derive (string) elem_str = elem_str.replace('- ','-').strip() # derivative axis der_q = int(der_var.replace('q','')) # detect positive/negative elem_split = elem_str.split('-') cur_len = len(elem_split) if cur_len == 1: # positive neg_flag = 0 pos_str = elem_split[0] elif cur_len == 2: # negative neg_flag = 1 pos_str = elem_split[1] else: print('Error: {} instead of 1 or 2 in negative detection !'.format(cur_len)) exit() # compute derivative result = 0 # cosine if pos_str == 'c{}'.format(der_q): result += -sp.Symbol('s{}'.format(der_q)) # sine elif pos_str == 's{}'.format(der_q): result += sp.Symbol('c{}'.format(der_q)) # other else: [rot_flag, a, b, c] = isRot(pos_str) [vec_flag, d, e] = isVec(pos_str) # rotation matrix if rot_flag: result += Rj[a-1][der_q-1][(b-1)*3+(c-1)] # vector elif vec_flag: result += xj[d-1][der_q-1][e-1] # apply negative if neg_flag: result = -result return result # compute the derivative of an expression (for jacobian) def symbolic_jacob_der(Rj, xj, xgj, symb_var, der_var): # list of all terms term_list = str(symb_var).replace('- ','-').replace('-','+-').split('+') if term_list[0] == '': term_list.pop(0) result = 0 # loop on all terms for cur_term in term_list: # detect products cur_term_split = cur_term.split('*') cur_len = len(cur_term_split) # no product if cur_len == 1: result += der_elem(cur_term_split[0], Rj, xj, xgj, der_var) # one product elif cur_len == 2: result += der_elem(cur_term_split[0], Rj, xj, xgj, der_var)*sp.Symbol(cur_term_split[1].strip()) result += der_elem(cur_term_split[1], Rj, xj, xgj, der_var)*sp.Symbol(cur_term_split[0].strip()) # other else: print('Error: {} * counted , only implemented for 0 or 1 !'.format(cur_len-1)) exit() return result # write the beginning of the file def write_file_beginning(out_file, joint_id_names): out_file.write('/*! 
\n') out_file.write(' * \\author <NAME>\n') out_file.write(' * \\file forward_kinematics.cc\n') out_file.write(' * \\brief forward kinematics computation for the COMAN model\n') out_file.write(' */\n\n') out_file.write('// joints enumeration\n') out_file.write('enum {') count = 0 for i in range(1, len(joint_id_names)): count += 1 if i == 1: out_file.write('{}'.format(get_string_enum(joint_id_names[i]))) elif count >= 6: count = 0 out_file.write(',\n {}'.format(get_string_enum(joint_id_names[i]))) else: out_file.write(', {}'.format(get_string_enum(joint_id_names[i]))) out_file.write('};\n\n') out_file.write('/*! \\brief main kinematics computation\n') out_file.write(' *\n') out_file.write(' * \\param[in,out] in_out inputs and outputs class\n') out_file.write(' *\n') out_file.write(' * computation of:\n') out_file.write(' * COM (center of mass) position and velocity\n') out_file.write(' * feet position, velocity and orientation\n') out_file.write(' * waist and torso orientaion angles and derivatives\n') out_file.write(' *\n') out_file.write(' * ////////////////////////\n') out_file.write(' * // //\n') out_file.write(' * // 17 16 21 //\n') out_file.write(' * // 18 15 22 //\n') out_file.write(' * // 19 14 23 //\n') out_file.write(' * // 20 01 24 //\n') out_file.write(' * // 02 08 //\n') out_file.write(' * // 03 09 //\n') out_file.write(' * // 04 10 //\n') out_file.write(' * // 05 11 //\n') out_file.write(' * // 06 12 //\n') out_file.write(' * // 07 13 //\n') out_file.write(' * // //\n') out_file.write(' * ////////////////////////\n') out_file.write(' *\n') out_file.write(' * origin: in the waist, middle point between the two pitch hip rotations\n') out_file.write(' * inertial frame: located at the origin (waist), but aligned with the ground (info from IMU)\n') out_file.write(' *\n') out_file.write(' * Di : position vector from the anchor point of the previous body to the current body i \n') out_file.write(' * (previous body is not always body i-1), expressed in the relative\n') out_file.write(' * frame of the previous body\n') out_file.write(' * DGi : position vector from the anchor point of body i to its COM (center of mass) G_i,\n') out_file.write(' * expressed in the relative frame of the current body i\n') out_file.write(' * Omi : rotational vector from the previous body to the current body i \n') out_file.write(' * (previous body is not always body i-1), expressed in the relative\n') out_file.write(' * frame of the previous body\n') out_file.write(' * Rdi : rotational matrix between body i and its predecessor\n') out_file.write(' * si : sine of the relative angle before body i\n') out_file.write(' * ci : cosine of the relative angle before body i\n') out_file.write(' *\n') out_file.write(' * xi : absolute position vector (from origin, expressed in the inertial frame)\n') out_file.write(' * of the anchor point of body i\n') out_file.write(' * xgi : absolute position vector of the COM G_i of body i\n') out_file.write(' * xpi : derivative of xi\n') out_file.write(' * xgpi : derivative of xgi\n') out_file.write(' * omi : absolute rotational vector of body i\n') out_file.write(' * Ri : absolute rotational matrix\n') out_file.write(' * Rti : transpose matrix of Ri\n') out_file.write(' * xji : jacobian of \'xi\'\n') out_file.write(' * xgji : jacobian of \'xgi\'\n') out_file.write(' * Rji : jacobian of \'Ri\'\n') out_file.write(' */\n') out_file.write('void ForwardKinematics::main_kinematics(KinematicsInOut &in_out)\n{\n') # compute the center of mass position and velocity def 
com_compute(out_file, nb_bodies, joint_id_names, M, xg, xgp, xgj): out_file.write(' m_tot = ') for i in range(0, nb_bodies): out_file.write('{}'.format(M[i])) if i == nb_bodies-1: out_file.write(';\n\n') else: out_file.write(' + ') out_file.write(' // global com absolute position\n') for i in range(0, 3): out_file.write(' in_out.r_COM[{}] = '.format(i)) flag_first = 0 for j in range(0, nb_bodies): if flag_first: out_file.write(' + {}*{}'.format(M[j], xg[j][i])) else: flag_first = 1 out_file.write('({}*xg{}_{}'.format(M[j], j+1, i+1)) if j == nb_bodies-1: if flag_first: out_file.write(')/m_tot;\n') else: out_file.write('0.0;\n') out_file.write('\n') out_file.write(' // global com absolute velocity\n') for i in range(0, 3): out_file.write(' in_out.rp_COM[{}] = '.format(i)) flag_first = 0 for j in range(0, nb_bodies): if flag_first: out_file.write(' + {}*xgp{}_{}'.format(M[j], j+1, i+1)) else: flag_first = 1 out_file.write('({}*xgp{}_{}'.format(M[j], j+1, i+1)) if j == nb_bodies-1: if flag_first: out_file.write(')/m_tot;\n') else: out_file.write('0.0;\n') out_file.write('\n') out_file.write(' // global com jacobian\n') out_file.write(' if (flag_jacob)\n {\n') for i in range(1, nb_bodies): for j in range(0, 3): out_file.write(' in_out.r_COM_der[{}][{}] = '.format(get_string_enum(joint_id_names[i]), j)) flag_first = 0 for k in range(0, nb_bodies): if xgj[k][i][j] != 0: if flag_first: out_file.write(' + {}*{}'.format(M[k], str(xgj[k][i][j]))) else: flag_first = 1 out_file.write('({}*{}'.format(M[k], str(xgj[k][i][j]))) if k == nb_bodies-1: if flag_first: out_file.write(')/m_tot;\n') else: out_file.write('0.0;\n') if i != nb_bodies-1: out_file.write('\n') else: out_file.write(' }\n\n') # from an orientation matrix, compute the roll, pitch, yaw angles (and derivative) def yaw_pitch_roll_angles(out_file, angle_name, R_matrix, epsilon): if epsilon > 0: # epsilon = 1 -> pitch angle in [-pi/2 ; pi/2] out_file.write(' in_out.{}[0] = atan2({}, {});\n'.format(angle_name, R_matrix[5], R_matrix[8])) out_file.write(' in_out.{}[1] = atan2(-{}, sqrt({}*{} + {}*{}));\n'.format(angle_name, R_matrix[2], R_matrix[0], R_matrix[0], R_matrix[1], R_matrix[1])) out_file.write(' in_out.{}[2] = atan2({}, {});\n'.format(angle_name, R_matrix[1], R_matrix[0])) else: # epsilon = -1 -> pitch angle in [pi/2 ; 3*pi/2] out_file.write(' in_out.{}[0] = atan2(-{}, -{});\n'.format(angle_name, R_matrix[5], R_matrix[8])) out_file.write(' in_out.{}[1] = atan2(-{}, -sqrt({}*{} + {}*{}));\n'.format(angle_name, R_matrix[2], R_matrix[0], R_matrix[0], R_matrix[1], R_matrix[1])) out_file.write(' in_out.{}[2] = atan2(-{}, -{});\n'.format(angle_name, R_matrix[1], R_matrix[0])) # compute the time derivatives of 'yaw_pitch_roll_angles' def theta_dot_compute(out_file, omega_in, omega_out, body_part): out_file.write(' in_out.{}[0] = inv_c_y_{} * (c_z_{}*{} + s_z_{}*{});\n'.format(omega_out, body_part, body_part, omega_in[0], body_part, omega_in[1])) out_file.write(' in_out.{}[1] = c_z_{}*{} - s_z_{}*{};\n'.format(omega_out, body_part, omega_in[1], body_part, omega_in[0])) out_file.write(' in_out.{}[2] = inv_c_y_{} * s_y_{} * (s_z_{}*{} + c_z_{}*{}) + {};\n'.format(omega_out, body_part, body_part, body_part, omega_in[1], body_part, omega_in[0], omega_in[2])) # angles (position and derivative) of the waist and the torso def torso_waist_angles(out_file, R, om, waist_id, torso_id): out_file.write(' // waist orientation matrix as angles [rad]\n') yaw_pitch_roll_angles(out_file, 'theta_waist', R[waist_id], 1) out_file.write('\n') out_file.write(' // 
torso orientation matrix as angles [rad]\n') yaw_pitch_roll_angles(out_file, 'theta_torso', R[torso_id], 1) out_file.write('\n') out_file.write(' c_y_waist = cos(in_out.theta_waist[1]);\n') out_file.write(' c_y_torso = cos(in_out.theta_torso[1]);\n') out_file.write(' c_z_waist = cos(in_out.theta_waist[2]);\n') out_file.write(' c_z_torso = cos(in_out.theta_torso[2]);\n\n') out_file.write(' s_y_waist = sin(in_out.theta_waist[1]);\n') out_file.write(' s_y_torso = sin(in_out.theta_torso[1]);\n') out_file.write(' s_z_waist = sin(in_out.theta_waist[2]);\n') out_file.write(' s_z_torso = sin(in_out.theta_torso[2]);\n\n') out_file.write(' if ((!c_y_waist) || (!c_y_torso))\n {\n') out_file.write(' return;\n }\n\n') out_file.write(' inv_c_y_waist = 1.0 / c_y_waist;\n') out_file.write(' inv_c_y_torso = 1.0 / c_y_torso;\n\n') out_file.write(' // waist orientation angle derivatives [rad/s]\n') theta_dot_compute(out_file, om[waist_id], 'omega_waist', 'waist') out_file.write('\n') out_file.write(' // torso orientation angle derivatives [rad/s]\n') theta_dot_compute(out_file, om[torso_id], 'omega_torso', 'torso') # compute the feet position, velocity and orientation def feet_compute(out_file, joint_id_names, R, x, xp, om, Rj, xj, xgj, r_foot_id, l_foot_id, x_min, x_max, y_min, y_max): # symbolic variables declarations nb_contacts = 4 x_r_foot = x[r_foot_id] x_l_foot = x[l_foot_id] xp_r_foot = xp[r_foot_id] xp_l_foot = xp[l_foot_id] om_r_foot = om[r_foot_id] om_l_foot = om[l_foot_id] R_r_foot = R[r_foot_id] R_l_foot = R[l_foot_id] Dpt_r_foot = sp.zeros(3, 1) Dpt_l_foot = sp.zeros(3, 1) Dpt_r_foot[2] = sp.Symbol('DPT_3_16') Dpt_l_foot[2] = sp.Symbol('DPT_3_29') Dpt_r_foot_cont = nb_contacts * [None] Dpt_l_foot_cont = nb_contacts * [None] for i in range(0, nb_contacts): Dpt_r_foot_cont[i] = sp.zeros(3, 1) Dpt_l_foot_cont[i] = sp.zeros(3, 1) Dpt_r_foot_cont[0][0] = x_min Dpt_r_foot_cont[1][0] = x_min Dpt_r_foot_cont[2][0] = x_max Dpt_r_foot_cont[3][0] = x_max Dpt_r_foot_cont[0][1] = y_min Dpt_r_foot_cont[1][1] = y_max Dpt_r_foot_cont[2][1] = y_min Dpt_r_foot_cont[3][1] = y_max for i in range(0, nb_contacts): Dpt_r_foot_cont[i][2] = sp.Symbol('DPT_3_16') for i in range(0, nb_contacts): for j in range(0, 3): Dpt_l_foot_cont[i][j] = Dpt_r_foot_cont[i][j] x_r_cont = nb_contacts * [None] x_l_cont = nb_contacts * [None] # computation om_tilde_r_foot = get_tilde(om_r_foot) om_tilde_l_foot = get_tilde(om_l_foot) x_r = x_r_foot + R_r_foot.T * Dpt_r_foot x_l = x_l_foot + R_l_foot.T * Dpt_l_foot xp_r = xp_r_foot + om_tilde_r_foot * (R_r_foot.T * Dpt_r_foot) xp_l = xp_l_foot + om_tilde_l_foot * (R_l_foot.T * Dpt_l_foot) for i in range(0, nb_contacts): x_r_cont[i] = x_r_foot + R_r_foot.T * Dpt_r_foot_cont[i] x_l_cont[i] = x_l_foot + R_l_foot.T * Dpt_l_foot_cont[i] # writing outputs out_file.write(' // right foot absolute position\n') for i in range(0,3): out_file.write(' in_out.r_Rfoot[{}] = {};\n'.format(i, x_r[i])) out_file.write('\n') out_file.write(' // right foot absolute velocity\n') for i in range(0,3): out_file.write(' in_out.rp_Rfoot[{}] = {};\n'.format(i, xp_r[i])) out_file.write('\n') out_file.write(' // right foot jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range (1, nb_bodies): flag_print = 0 for j in range(0, 3): cur_jac = symbolic_jacob_der(Rj, xj, xgj, x_r[j], 'q{}'.format(i+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.r_Rfoot_der[{}][{}] = 
{};\n'.format(get_string_enum(joint_id_names[i]), j, cur_jac)) out_file.write(' }\n\n') out_file.write(' // left foot absolute position\n') for i in range(0,3): out_file.write(' in_out.r_Lfoot[{}] = {};\n'.format(i, x_l[i])) out_file.write('\n') out_file.write(' // left foot absolute velocity\n') for i in range(0,3): out_file.write(' in_out.rp_Lfoot[{}] = {};\n'.format(i, xp_l[i])) out_file.write('\n') out_file.write(' // left foot jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range (1, nb_bodies): flag_print = 0 for j in range(0, 3): cur_jac = symbolic_jacob_der(Rj, xj, xgj, x_l[j], 'q{}'.format(i+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.r_Lfoot_der[{}][{}] = {};\n'.format(get_string_enum(joint_id_names[i]), j, cur_jac)) out_file.write(' }\n\n') out_file.write(' // right foot contact points absolute position\n') for i in range(0, nb_contacts): for j in range(0, 3): out_file.write(' in_out.r_Rfoot_cont[{}][{}] = {};\n'.format(i, j, x_r_cont[i][j])) out_file.write('\n') out_file.write(' // right foot contact points jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range(0, nb_contacts): for j in range (1, nb_bodies): flag_print = 0 for k in range(0, 3): cur_jac = symbolic_jacob_der(Rj, xj, xgj, x_r_cont[i][k], 'q{}'.format(j+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.r_Rfoot_cont_der[{}][{}][{}] = {};\n'.format(i, get_string_enum(joint_id_names[j]), k, cur_jac)) out_file.write(' }\n\n') out_file.write(' // left foot contact points absolute position\n') for i in range(0, nb_contacts): for j in range(0, 3): out_file.write(' in_out.r_Lfoot_cont[{}][{}] = {};\n'.format(i, j, x_l_cont[i][j])) out_file.write('\n') out_file.write(' // left foot contact points jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range(0, nb_contacts): for j in range (1, nb_bodies): flag_print = 0 for k in range(0, 3): cur_jac = symbolic_jacob_der(Rj, xj, xgj, x_l_cont[i][k], 'q{}'.format(j+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.r_Lfoot_cont_der[{}][{}][{}] = {};\n'.format(i, get_string_enum(joint_id_names[j]), k, cur_jac)) out_file.write(' }\n\n') out_file.write(' // feet absolute orientation\n') for i in range(0, 9): out_file.write(' in_out.Rfoot_or[{}] = {};\n'.format(i, R_r_foot[i])) out_file.write('\n') for i in range(0, 9): out_file.write(' in_out.Lfoot_or[{}] = {};\n'.format(i, R_l_foot[i])) out_file.write('\n') out_file.write(' // right foot absolute orientation jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range (1, nb_bodies): flag_print = 0 for j in range(0,9): cur_jac = symbolic_jacob_der(Rj, xj, xgj, R_r_foot[j], 'q{}'.format(i+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.Rfoot_or_der[{}][{}] = {};\n'.format(get_string_enum(joint_id_names[i]), j, cur_jac)) out_file.write(' }\n\n') out_file.write(' // left foot absolute orientation jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range (1, nb_bodies): flag_print = 0 for j in range(0,9): cur_jac = symbolic_jacob_der(Rj, xj, xgj, R_l_foot[j], 'q{}'.format(i+1)) if cur_jac != 0: if not 
flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.Lfoot_or_der[{}][{}] = {};\n'.format(get_string_enum(joint_id_names[i]), j, cur_jac)) out_file.write(' }\n\n') out_file.write(' // right foot orientation matrix as angles [rad]\n') yaw_pitch_roll_angles(out_file, 'theta_Rfoot', R[r_foot_id], 1) out_file.write('\n') out_file.write(' // left foot orientation matrix as angles [rad]\n') yaw_pitch_roll_angles(out_file, 'theta_Lfoot', R[l_foot_id], 1) out_file.write('\n') out_file.write(' c_y_Rfoot = cos(in_out.theta_Rfoot[1]);\n') out_file.write(' c_y_Lfoot = cos(in_out.theta_Lfoot[1]);\n') out_file.write(' c_z_Rfoot = cos(in_out.theta_Rfoot[2]);\n') out_file.write(' c_z_Lfoot = cos(in_out.theta_Lfoot[2]);\n\n') out_file.write(' s_y_Rfoot = sin(in_out.theta_Rfoot[1]);\n') out_file.write(' s_y_Lfoot = sin(in_out.theta_Lfoot[1]);\n') out_file.write(' s_z_Rfoot = sin(in_out.theta_Rfoot[2]);\n') out_file.write(' s_z_Lfoot = sin(in_out.theta_Lfoot[2]);\n\n') out_file.write(' if ((!c_y_Rfoot) || (!c_y_Lfoot))\n {\n') out_file.write(' return;\n }\n\n') out_file.write(' inv_c_y_Rfoot = 1.0 / c_y_Rfoot;\n') out_file.write(' inv_c_y_Lfoot = 1.0 / c_y_Lfoot;\n\n') out_file.write(' // right foot orientation angle derivatives [rad/s]\n') theta_dot_compute(out_file, om[r_foot_id], 'omega_Rfoot', 'Rfoot') out_file.write('\n') out_file.write(' // left foot orientation angle derivatives [rad/s]\n') theta_dot_compute(out_file, om[l_foot_id], 'omega_Lfoot', 'Lfoot') out_file.write('\n') # compute the wrists position, velocity and orientation def wrists_compute(out_file, joint_id_names, R, x, xp, om, Rj, xj, xgj, r_elb_id, l_elb_id, r_wrist_x, r_wrist_y, r_wrist_z): # symbolic variables declarations x_r_elb = x[r_elb_id] x_l_elb = x[l_elb_id] xp_r_elb = xp[r_elb_id] xp_l_elb = xp[l_elb_id] om_r_elb = om[r_elb_id] om_l_elb = om[l_elb_id] R_r_elb = R[r_elb_id] R_l_elb = R[l_elb_id] Dpt_r_wrist = sp.zeros(3, 1) Dpt_l_wrist = sp.zeros(3, 1) Dpt_r_wrist[0] = r_wrist_x Dpt_r_wrist[1] = r_wrist_y Dpt_r_wrist[2] = r_wrist_z Dpt_l_wrist[0] = r_wrist_x Dpt_l_wrist[1] = -r_wrist_y Dpt_l_wrist[2] = r_wrist_z # computation om_tilde_r_elb = get_tilde(om_r_elb) om_tilde_l_elb = get_tilde(om_l_elb) x_r = x_r_elb + R_r_elb.T * Dpt_r_wrist x_l = x_l_elb + R_l_elb.T * Dpt_l_wrist xp_r = xp_r_elb + om_tilde_r_elb * (R_r_elb.T * Dpt_r_wrist) xp_l = xp_l_elb + om_tilde_l_elb * (R_l_elb.T * Dpt_l_wrist) # writing outputs out_file.write(' // right wrist absolute position\n') for i in range(0,3): out_file.write(' in_out.r_Rwrist[{}] = {};\n'.format(i, x_r[i])) out_file.write('\n') out_file.write(' // right wrist absolute velocity\n') for i in range(0,3): out_file.write(' in_out.rp_Rwrist[{}] = {};\n'.format(i, xp_r[i])) out_file.write('\n') out_file.write(' // right wrist jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range (1, nb_bodies): flag_print = 0 for j in range(0, 3): cur_jac = symbolic_jacob_der(Rj, xj, xgj, x_r[j], 'q{}'.format(i+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.r_Rwrist_der[{}][{}] = {};\n'.format(get_string_enum(joint_id_names[i]), j, cur_jac)) out_file.write(' }\n\n') out_file.write(' // left wrist absolute position\n') for i in range(0,3): out_file.write(' in_out.r_Lwrist[{}] = {};\n'.format(i, x_l[i])) out_file.write('\n') out_file.write(' // left wrist absolute velocity\n') for i in range(0,3): 
out_file.write(' in_out.rp_Lwrist[{}] = {};\n'.format(i, xp_l[i])) out_file.write('\n') out_file.write(' // left wrist jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range (1, nb_bodies): flag_print = 0 for j in range(0, 3): cur_jac = symbolic_jacob_der(Rj, xj, xgj, x_l[j], 'q{}'.format(i+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.r_Lwrist_der[{}][{}] = {};\n'.format(get_string_enum(joint_id_names[i]), j, cur_jac)) out_file.write(' }\n\n') out_file.write(' // wrists absolute orientation\n') for i in range(0, 9): out_file.write(' in_out.Rwrist_or[{}] = {};\n'.format(i, R_r_elb[i])) out_file.write('\n') for i in range(0, 9): out_file.write(' in_out.Lwrist_or[{}] = {};\n'.format(i, R_l_elb[i])) out_file.write('\n') out_file.write(' // right wrist absolute orientation jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range (1, nb_bodies): flag_print = 0 for j in range(0,9): cur_jac = symbolic_jacob_der(Rj, xj, xgj, R_r_elb[j], 'q{}'.format(i+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.Rwrist_or_der[{}][{}] = {};\n'.format(get_string_enum(joint_id_names[i]), j, cur_jac)) out_file.write(' }\n\n') out_file.write(' // left wrist absolute orientation jacobian\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range (1, nb_bodies): flag_print = 0 for j in range(0,9): cur_jac = symbolic_jacob_der(Rj, xj, xgj, R_l_elb[j], 'q{}'.format(i+1)) if cur_jac != 0: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write(' in_out.Lwrist_or_der[{}][{}] = {};\n'.format(get_string_enum(joint_id_names[i]), j, cur_jac)) out_file.write(' }\n\n') # get a string for the enumeration of joints def get_string_enum(cur_string): cur_split = cur_string.split('_') if len(cur_split) >= 2: new_string = cur_split[0] for i in range(1, len(cur_split)-1): new_string = '{}{}'.format(new_string, cur_split[i]) else: new_string = cur_string cur_split = filter(None, re.split("([A-Z][^A-Z]*)", new_string)) new_string = cur_split[0].upper() for i in range(1, len(cur_split)): new_string = '{}_{}'.format(new_string, cur_split[i].upper()) return new_string # write the end of the file def write_file_end(out_file): out_file.write('}\n') # print matrix components declaration def write_matrix_declaration(out_file, prefix): out_file.write(' double ') for i in range(0,3): for j in range(0,3): out_file.write('{}{}{}'.format(prefix, i+1, j+1)) if i == 2 and j == 2: out_file.write(';\n') else: out_file.write(', ') # print variables declaration def write_variables_declaration(out_file, prefix, min, max): out_file.write(' double ') for i in range(min, max+1): out_file.write('{}{}'.format(prefix, i)) if i == max: out_file.write(';\n') else: out_file.write(', ') # variables initialization def write_intialization(out_file, nb_bodies, joint_id_names): out_file.write(' // -- variables initialization -- //\n') out_file.write('\n // IMU - rotation matrices\n') for i in range(0, 3): for j in range(0, 3): out_file.write(' IMU{}{} = in_out.IMU_Orientation[{}];\n'.format(i+1, j+1, 3*i+j)) out_file.write('\n // IMU - angles velocity\n') for i in range(0, 3): out_file.write(' omega_{} = in_out.IMU_Angular_Rate[{}];\n'.format(i+1, i)) out_file.write('\n // joint cosines\n') for i in range(1, nb_bodies): 
out_file.write(' c{} = cos(in_out.q_mot[{}]);\n'.format(i+1, joint_id_names[i])) out_file.write('\n // joint sines\n') for i in range(1, nb_bodies): out_file.write(' s{} = sin(in_out.q_mot[{}]);\n'.format(i+1, joint_id_names[i])) out_file.write('\n // joint relative velocities\n') for i in range(1, nb_bodies): out_file.write(' Om{} = in_out.qd_mot[{}];\n'.format(i+1, joint_id_names[i])) # write symbolic vector and replace symbolic variable by its name def write_symb_vector(out_file, vector, start_name, end_name): new_vector = sp.zeros(3, 1) flag_print = 0 for i in range(0,3): if vector[i] == 0 or vector[i] == 1: new_vector[i] = vector[i] else: flag_print = 1 elem_name = '{}{}{}'.format(start_name, i+1, end_name) out_file.write(' {} = {};\n'.format(elem_name, vector[i]).replace('1.0*','')) new_vector[i] = sp.Symbol(elem_name) if flag_print: out_file.write('\n') return new_vector # write symbolic matrix and replace symbolic variable by its name def write_symb_matrix(out_file, matrix, start_name, end_name): new_matrix = sp.zeros(3, 3) flag_print = 0 for i in range(0,3): for j in range(0,3): if matrix[i,j] == 0 or matrix[i,j] == 1: new_matrix[i,j] = matrix[i,j] else: flag_print = 1 elem_name = '{}{}{}{}'.format(start_name, i+1, j+1, end_name) out_file.write(' {} = {};\n'.format(elem_name, matrix[i,j]).replace('1.0*','')) new_matrix[i,j] = sp.Symbol(elem_name) if flag_print: out_file.write('\n') return new_matrix # save the symbolic vector for print def print_save_symb_vector(vector, start_name, end_name): new_vector = sp.zeros(3, 1) save_vector = 3 * [None] for i in range(0,3): if vector[i] == 0 or vector[i] == 1: new_vector[i] = vector[i] save_vector[i] = None else: elem_name = '{}{}{}'.format(start_name, i+1, end_name) save_vector[i] = ' {} = {};\n'.format(elem_name, vector[i]).replace('1.0*','') new_vector[i] = sp.Symbol(elem_name) return new_vector, save_vector # save the symbolic matrix for print def print_save_symb_matrix(matrix, start_name, end_name): new_matrix = sp.zeros(3, 3) save_matrix = 9 * [None] for i in range(0,3): for j in range(0,3): if matrix[i,j] == 0 or matrix[i,j] == 1: new_matrix[i,j] = matrix[i,j] save_matrix[3*i+j] = None else: elem_name = '{}{}{}{}'.format(start_name, i+1, j+1, end_name) save_matrix[3*i+j] = ' {} = {};\n'.format(elem_name, matrix[i,j]).replace('1.0*','') new_matrix[i,j] = sp.Symbol(elem_name) return new_matrix, save_matrix # write symbolic jacobian of a rotation matrix def write_symb_Rj(nb_bodies, Rj, xj, xgj, Rj_print, R_matrix, index): # loop on all the joints for i in range (1, nb_bodies): new_matrix = sp.zeros(3, 3) # loop on all the matrix elements for j in range(0, 9): new_matrix[j] = symbolic_jacob_der(Rj, xj, xgj, R_matrix[j], 'q{}'.format(i+1)) [Rj[index-1][i], Rj_print[index-1][i]] = print_save_symb_matrix(new_matrix, 'R{}_'.format(index), '_d{}'.format(i+1)) # write symbolic jacobian of an anchor point def write_symb_xj(nb_bodies, Rj, xj, xgj, xj_print, x_vector, index): # loop on all the joints for i in range (1, nb_bodies): new_vector = sp.zeros(3, 1) # loop on all the vector elements for j in range(0, 3): new_vector[j] = symbolic_jacob_der(Rj, xj, xgj, x_vector[j], 'q{}'.format(i+1)) [xj[index-1][i], xj_print[index-1][i]] = print_save_symb_vector(new_vector, 'x{}_'.format(index), '_d{}'.format(i+1)) # write symbolic jacobian of a com point def write_symb_xgj(nb_bodies, Rj, xj, xgj, xgj_print, x_vector, index): # loop on all the joints for i in range (1, nb_bodies): new_vector = sp.zeros(3, 1) # loop on all the vector elements for j in 
range(0, 3): new_vector[j] = symbolic_jacob_der(Rj, xj, xgj, x_vector[j], 'q{}'.format(i+1)) [xgj[index-1][i], xgj_print[index-1][i]] = print_save_symb_vector(new_vector, 'xg{}_'.format(index), '_d{}'.format(i+1)) # symbolic computation def symbolic_computation(out_file, nb_bodies, joint_id_names, rot_axis, parent_body_index, Dpt, Dg, M): out_file.write('\n\n // -- symbolic computation -- //\n') # Rj, xj, xgj and xgj (jacobian) Rj = nb_bodies*[None] xj = nb_bodies*[None] xgj = nb_bodies*[None] Rj_print = nb_bodies*[None] xj_print = nb_bodies*[None] xgj_print = nb_bodies*[None] for i in range(0, nb_bodies): Rj[i] = nb_bodies*[None] xj[i] = nb_bodies*[None] xgj[i] = nb_bodies*[None] Rj_print[i] = nb_bodies*[None] xj_print[i] = nb_bodies*[None] xgj_print[i] = nb_bodies*[None] for j in range(0, nb_bodies-1): Rj[i][j] = sp.zeros(3, 3) xj[i][j] = sp.zeros(3, 1) xgj[i][j] = sp.zeros(3, 1) Rj_print[i][j] = 9 * [None] xj_print[i][j] = 3 * [None] xgj_print[i][j] = 3 * [None] # rotation matrices out_file.write('\n // rotation matrices\n') R = nb_bodies*[None] Rt = nb_bodies*[None] Rd = nb_bodies*[None] Rd[0] = sp.zeros(3, 3) R[0] = sp.zeros(3, 3) for i in range(0, 3): for j in range(0, 3): R[0][i,j] = sp.Symbol('IMU{}{}'.format(i+1, j+1)) write_symb_Rj(nb_bodies, Rj, xj, xgj, Rj_print, R[0], 1) R[0] = write_symb_matrix(out_file, R[0], 'R1_', '') Rt[0] = R[0].T for i in range(1, nb_bodies): Rd[i] = get_rotation_matrix(rot_axis[i], 1, sp.Symbol('c{}'.format(i+1)), sp.Symbol('s{}'.format(i+1))) R[i] = Rd[i] * R[parent_body_index[i]] write_symb_Rj(nb_bodies, Rj, xj, xgj, Rj_print, R[i], i+1) R[i] = write_symb_matrix(out_file, R[i], 'R{}_'.format(i+1), '') Rt[i] = R[i].T # jacobian rotation matrices out_file.write('\n // jacobian rotation matrices\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range(0, nb_bodies): for j in range(1, nb_bodies): flag_print = 0 for k in range(0, 9): if Rj_print[i][j][k] != None: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write('{}'.format(Rj_print[i][j][k])) out_file.write(' }\n') # omega out_file.write('\n // joint absolute velocities\n') Om = nb_bodies*[None] om = nb_bodies*[None] om_tilde = nb_bodies*[None] Om[0] = sp.zeros(3, 1) om[0] = sp.zeros(3, 1) for i in range(0,3): om[0][i] = sp.Symbol('omega_{}'.format(i+1)) om[0] = write_symb_vector(out_file, om[0], 'om1_', '') om_tilde[0] = get_tilde(om[0]) for i in range(1, nb_bodies): parent_id = parent_body_index[i] Om[i] = get_vector_axis(rot_axis[i], 1, sp.Symbol('Om{}'.format(i+1))) om[i] = om[parent_id] + Rt[parent_id] * Om[i] om[i] = write_symb_vector(out_file, om[i], 'om{}_'.format(i+1), '') om_tilde[i] = get_tilde(om[i]) # x & xp out_file.write('\n // anchor point absolute positions and velocities\n') x = nb_bodies*[None] xp = nb_bodies*[None] x[0] = Rt[0] * Dpt[0] xp[0] = om_tilde[0] * (Rt[0] * Dpt[0]) write_symb_xj(nb_bodies, Rj, xj, xgj, xj_print, x[0], 1) x[0] = write_symb_vector(out_file, x[0], 'x1_', '') xp[0] = write_symb_vector(out_file, xp[0], 'xp1_', '') for i in range(1, nb_bodies): parent_id = parent_body_index[i] x[i] = x[parent_id] + Rt[parent_id] * Dpt[i] xp[i] = xp[parent_id] + om_tilde[parent_id] * (Rt[parent_id] * Dpt[i]) write_symb_xj(nb_bodies, Rj, xj, xgj, xj_print, x[i], i+1) x[i] = write_symb_vector(out_file, x[i], 'x{}_'.format(i+1), '') xp[i] = write_symb_vector(out_file, xp[i], 'xp{}_'.format(i+1), '') # jacobian x out_file.write('\n // jacobian anchor point positions\n') out_file.write(' if 
(flag_jacob)\n {\n') flag_first = 0 for i in range(0, nb_bodies): for j in range(1, nb_bodies): flag_print = 0 for k in range(0, 3): if xj_print[i][j][k] != None: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write('{}'.format(xj_print[i][j][k])) out_file.write(' }\n') # xg & xgp out_file.write('\n // com absolute positions and velocities\n') xg = nb_bodies*[None] xgp = nb_bodies*[None] for i in range(0, nb_bodies): xg[i] = x[i] + Rt[i] * Dg[i] xgp[i] = xp[i] + om_tilde[i] * (Rt[i] * Dg[i]) write_symb_xgj(nb_bodies, Rj, xj, xgj, xgj_print, xg[i], i+1) xg[i] = write_symb_vector(out_file, xg[i], 'xg{}_'.format(i+1), '') xgp[i] = write_symb_vector(out_file, xgp[i], 'xgp{}_'.format(i+1), '') # jacobian xg out_file.write('\n // jacobian com absolute positions\n') out_file.write(' if (flag_jacob)\n {\n') flag_first = 0 for i in range(0, nb_bodies): for j in range(1, nb_bodies): flag_print = 0 for k in range(0, 3): if xgj_print[i][j][k] != None: if not flag_first: flag_first = 1 flag_print = 1 elif not flag_print: flag_print = 1 out_file.write('\n') out_file.write('{}'.format(xgj_print[i][j][k])) out_file.write(' }\n') # results out_file.write('\n // -- Collecting results -- //\n\n') com_compute(out_file, nb_bodies, joint_id_names, M, xg, xgp, xgj) feet_compute(out_file, joint_id_names, R, x, xp, om, Rj, xj, xgj, 6, 12, -0.06, 0.08, -0.045, 0.045) wrists_compute(out_file, joint_id_names, R, x, xp, om, Rj, xj, xgj, 19, 23, -0.02, -0.005, -0.225) torso_waist_angles(out_file, R, om, 0, 15) # generate the symbolic output file def gen_symbolic_out(out_file_name, nb_bodies, rot_axis, parent_body_index, joint_id_names, Dpt, Dg, M): # temporary file in_temp = './{}_temp.cc'.format(out_file_name) file_temp = open(in_temp, 'w') # beginning of the file write_file_beginning(file_temp, joint_id_names) # variables initialization write_intialization(file_temp, nb_bodies, joint_id_names) # symbolic computation symbolic_computation(file_temp, nb_bodies, joint_id_names, rot_axis, parent_body_index, Dpt, Dg, M) # end of the file write_file_end(file_temp) file_temp.close() # output file out_file = open('./{}.cc'.format(out_file_name), 'w') with open(in_temp, 'r') as f: # loop on all the lines for line in f: # declaration if len(line.split('// -- variables initialization -- //')) != 1: out_file.write(' // -- variables declaration -- //\n\n') print_all_declaration(in_temp, out_file, 100) out_file.write('\n\n') # copy temporary file out_file.write(line) out_file.close() # remove temporary file os.remove(in_temp) # main script # rotation axis for each joint before body i (1:x, 2:y, 3:z) rot_axis = np.array([0, # waist 2, 1, 3, 2, 1, 2, # right leg 2, 1, 3, 2, 1, 2, # left leg 1, 2, 3, # trunk 2, 1, 3, 2, # right arm 2, 1, 3, 2 # left arm ]) # parent index parent_body_index = np.array([ -1, # waist 0, 1, 2, 3, 4, 5, # right leg 0, 7, 8, 9, 10, 11, # left leg 0, 13, 14, # trunk 15, 16, 17, 18, # right arm 15, 20, 21, 22 # left arm ]) nb_bodies = len(parent_body_index) ## anchor point positions Dpt = nb_bodies*[None] # waist Dpt[0] = sp.Matrix([0.0, 0.0, 0.0]) # right leg Dpt[1] = sp.Matrix([0.0, sp.Symbol('DPT_2_2'), 0.0]) Dpt[2] = sp.Matrix([0.0, sp.Symbol('DPT_2_6'), 0.0]) Dpt[3] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_8')]) Dpt[4] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_10')]) Dpt[5] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_12')]) Dpt[6] = sp.Matrix([0.0, 0.0, 0.0]) # left leg Dpt[7] = sp.Matrix([0.0, sp.Symbol('DPT_2_3'), 0.0]) Dpt[8] = 
sp.Matrix([0.0, sp.Symbol('DPT_2_18'), 0.0]) Dpt[9] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_20')]) Dpt[10] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_22')]) Dpt[11] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_24')]) Dpt[12] = sp.Matrix([0.0, 0.0, 0.0]) # trunk Dpt[13] = sp.Matrix([sp.Symbol('DPT_1_4'), 0.0, sp.Symbol('DPT_3_4')]) Dpt[14] = sp.Matrix([0.0, 0.0, 0.0]) Dpt[15] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_32')]) # right arm Dpt[16] = sp.Matrix([sp.Symbol('DPT_1_36'), sp.Symbol('DPT_2_36'), sp.Symbol('DPT_3_36')]) Dpt[17] = sp.Matrix([0.0, sp.Symbol('DPT_2_39'), 0.0]) Dpt[18] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_41')]) Dpt[19] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_43')]) # left arm Dpt[20] = sp.Matrix([sp.Symbol('DPT_1_37'), sp.Symbol('DPT_2_37'), sp.Symbol('DPT_3_37')]) Dpt[21] = sp.Matrix([0.0, sp.Symbol('DPT_2_46'), 0.0]) Dpt[22] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_48')]) Dpt[23] = sp.Matrix([0.0, 0.0, sp.Symbol('DPT_3_50')]) ## COM positions Dg = nb_bodies*[None] # waist Dg[0] = sp.Matrix([sp.Symbol('L_1_6'), sp.Symbol('L_2_6'), sp.Symbol('L_3_6')]) # right leg Dg[1] = sp.Matrix([sp.Symbol('L_1_7') , sp.Symbol('L_2_7') , sp.Symbol('L_3_7')]) Dg[2] = sp.Matrix([sp.Symbol('L_1_8') , sp.Symbol('L_2_8') , sp.Symbol('L_3_8')]) Dg[3] = sp.Matrix([sp.Symbol('L_1_9') , sp.Symbol('L_2_9') , sp.Symbol('L_3_9')]) Dg[4] = sp.Matrix([sp.Symbol('L_1_10'), sp.Symbol('L_2_10'), sp.Symbol('L_3_10')]) Dg[5] = sp.Matrix([sp.Symbol('L_1_11'), sp.Symbol('L_2_11'), sp.Symbol('L_3_11')]) Dg[6] = sp.Matrix([sp.Symbol('L_1_12'), 0.0 , sp.Symbol('L_3_12')]) # left leg Dg[7] = sp.Matrix([sp.Symbol('L_1_13'), sp.Symbol('L_2_13'), sp.Symbol('L_3_13')]) Dg[8] = sp.Matrix([sp.Symbol('L_1_14'), sp.Symbol('L_2_14'), sp.Symbol('L_3_14')]) Dg[9] = sp.Matrix([sp.Symbol('L_1_15'), sp.Symbol('L_2_15'), sp.Symbol('L_3_15')]) Dg[10] = sp.Matrix([sp.Symbol('L_1_16'), sp.Symbol('L_2_16'), sp.Symbol('L_3_16')]) Dg[11] = sp.Matrix([sp.Symbol('L_1_17'), sp.Symbol('L_2_17'), sp.Symbol('L_3_17')]) Dg[12] = sp.Matrix([sp.Symbol('L_1_18'), 0.0 , sp.Symbol('L_3_18')]) # trunk Dg[13] = sp.Matrix([sp.Symbol('L_1_19'), sp.Symbol('L_2_19'), sp.Symbol('L_3_19')]) Dg[14] = sp.Matrix([sp.Symbol('L_1_20'), sp.Symbol('L_2_20'), sp.Symbol('L_3_20')]) Dg[15] = sp.Matrix([sp.Symbol('L_1_21'), sp.Symbol('L_2_21'), sp.Symbol('L_3_21')]) # right arm Dg[16] = sp.Matrix([sp.Symbol('L_1_22'), sp.Symbol('L_2_22'), sp.Symbol('L_3_22')]) Dg[17] = sp.Matrix([sp.Symbol('L_1_23'), sp.Symbol('L_2_23'), sp.Symbol('L_3_23')]) Dg[18] = sp.Matrix([sp.Symbol('L_1_24'), sp.Symbol('L_2_24'), sp.Symbol('L_3_24')]) Dg[19] = sp.Matrix([sp.Symbol('L_1_25'), sp.Symbol('L_2_25'), sp.Symbol('L_3_25')]) # left arm Dg[20] = sp.Matrix([sp.Symbol('L_1_26'), sp.Symbol('L_2_26'), sp.Symbol('L_3_26')]) Dg[21] = sp.Matrix([sp.Symbol('L_1_27'), sp.Symbol('L_2_27'), sp.Symbol('L_3_27')]) Dg[22] = sp.Matrix([sp.Symbol('L_1_28'), sp.Symbol('L_2_28'), sp.Symbol('L_3_28')]) Dg[23] = sp.Matrix([sp.Symbol('L_1_29'), sp.Symbol('L_2_29'), sp.Symbol('L_3_29')]) # masses M = np.array([ 'M_6', # waist 'M_7' , 'M_8' , 'M_9' , 'M_10', 'M_11', 'M_12', # right leg 'M_13', 'M_14', 'M_15', 'M_16', 'M_17', 'M_18', # left leg 'M_19', 'M_20', 'M_21', # trunk 'M_22', 'M_23', 'M_24', 'M_25', # right arm 'M_26', 'M_27', 'M_28', 'M_29' # left arm ]) # joint names joint_id_names = np.array(['0', # waist 'RightHipPitch_id', 'RightHipRoll_id', 'RightHipYaw_id', 'RightKneePitch_id', 'RightFootRoll_id', 'RightFootPitch_id', # right leg 'LeftHipPitch_id' , 'LeftHipRoll_id' , 'LeftHipYaw_id' , 
'LeftKneePitch_id' , 'LeftFootRoll_id' , 'LeftFootPitch_id' , # left leg 'TorsoRoll_id' , 'TorsoPitch_id' , 'TorsoYaw_id' , # trunk 'RightShPitch_id' , 'RightShRoll_id' , 'RightShYaw_id' , 'RightElbPitch_id', # right arm 'LeftShPitch_id' , 'LeftShRoll_id' , 'LeftShYaw_id' , 'LeftElbPitch_id' # left arm ]) out_file_name = 'forward_kinematics' gen_symbolic_out(out_file_name, nb_bodies, rot_axis, parent_body_index, joint_id_names, Dpt, Dg, M)
2.15625
2
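The forward-kinematics generator above leans on helpers such as get_tilde and get_rotation_matrix that are defined earlier in the same script but not shown here. Below is a minimal sketch, purely as an assumption for illustration, of what those helpers are presumed to compute: the skew-symmetric cross-product ("tilde") matrix and an elementary rotation about one axis, built with sympy like the rest of the script. The function names and the sign/axis convention are assumptions, not the script's exact implementation.

import sympy as sp

def tilde(v):
    # skew-symmetric matrix such that tilde(v) * w == v x w (cross product)
    return sp.Matrix([[0, -v[2], v[1]],
                      [v[2], 0, -v[0]],
                      [-v[1], v[0], 0]])

def elementary_rotation(axis, c, s):
    # elementary rotation about x (1), y (2) or z (3), given cos/sin symbols;
    # the sign convention here is an assumption for illustration only
    if axis == 1:
        return sp.Matrix([[1, 0, 0], [0, c, s], [0, -s, c]])
    elif axis == 2:
        return sp.Matrix([[c, 0, -s], [0, 1, 0], [s, 0, c]])
    return sp.Matrix([[c, s, 0], [-s, c, 0], [0, 0, 1]])

c2, s2 = sp.symbols('c2 s2')
R = elementary_rotation(2, c2, s2)        # a joint rotation about the y axis
om = sp.Matrix(sp.symbols('om1 om2 om3')) # an absolute angular velocity
print(tilde(om) * R.T)                    # the om_tilde * Rt pattern used above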
config.py
lyth031/ptb_lm
0
2712
# -*- coding: utf-8 -*-


class Config(object):
    def __init__(self):
        self.init_scale = 0.1
        self.learning_rate = 1.0
        self.max_grad_norm = 5
        self.num_layers = 2
        self.slice_size = 30
        self.hidden_size = 200
        self.max_epoch = 13
        self.keep_prob = 0.8
        self.lr_const_epoch = 4
        self.lr_decay = 0.7
        self.batch_size = 30
        self.vocab_size = 10000
        self.rnn_model = "gru"
        self.data_path = "./data/"
        self.save_path = "../out/cudnn/gru/"
1.234375
1
cvp_rest_api_examples/cvpLabelAdd.py
kakkotetsu/CVP-Scripts
8
2728
#!/usrb/bin/env python # Copyright (c) 2019, Arista Networks, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # - Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # - Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # - Neither the name of Arista Networks nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE # OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN # IF NOT ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #!/usr/bin/env python import requests import json import argparse import urllib3 def parseArgs(): parser = argparse.ArgumentParser() parser.add_argument( '-c', '--cvpName', required=True, help='cvp name' ) parser.add_argument( '-u', '--userId', help='username', default='cvpadmin') parser.add_argument( '-p', '--password', help='password', default='<PASSWORD>') args = vars( parser.parse_args() ) return args.pop( 'cvpName' ), args def getCvpInfo( cvpName ): api = 'cvpInfo/getCvpInfo.do' url = 'https://%s:443/web/%s' % ( cvpName, api ) print 'calling url: ', url return requests.get( url, cookies=cookies, verify=False ) def addDeviceToLabel( cvpName, label, deviceMac ): api = 'label/labelAssignToDevice.do' url = 'https://%s:443/web/%s' % ( cvpName, api ) body = {'label': label, 'device': deviceMac} print 'calling url: ', url return requests.post( url, cookies=cookies, data=json.dumps(body), verify=False ) def authenticate( cvpName, loginInfo ): url = 'https://%s:443/web/login/authenticate.do' % ( cvpName, ) return requests.post( url, json.dumps( loginInfo ), verify=False ) if __name__ == '__main__': urllib3.disable_warnings() cvpName, loginInfo = parseArgs() cookies = authenticate( cvpName, loginInfo ).cookies #print json.loads(getCvpInfo( cvpName ).text) #print getCvpInfo( cvpName ).json() print 'getCvpInfo:' print json.dumps(getCvpInfo( cvpName ).json(), indent=2) # ADD DEVICE TO LABEL # label = "{ tagType: tagValue }" label = "mlag:mlagNY" device = "de:ad:be:ef:ca:fe" print 'addDeviceToLabel:', label, device print json.dumps(addDeviceToLabel( cvpName, label, device ).json(), indent=2)
1.304688
1
train.py
kushaliitm/deep-learning
0
2736
import argparse
import helper as hp
import torch
import os
import json

parser = argparse.ArgumentParser(description = 'train.py')
parser.add_argument('--data-dir', nargs = '*', action = "store", default = "./flowers/", help = "folder path for data")
parser.add_argument('--save-dir', action = "store", required=True, help = "filepath for saving checkpoint")
parser.add_argument('--learning-rate', action = "store", default = 0.001, help = "learning rate for the optimizer")
parser.add_argument('--epoch-num', action = "store", type = int, default = 3, help = "epoch value")
parser.add_argument('--architecture', action = "store", default = "vgg16", type = str, help = "specify the neural network structure: vgg16 or densenet121")
parser.add_argument('--hidden-size', type = int, action = "store", default = 1000, help = "state the units for fc2")
parser.add_argument('--optimizer', action='store', default='adam', help='Optimizer to optimize')

pa = parser.parse_args()
pa = vars(pa)
print(pa)

data_path = pa['data_dir']
save_dir = pa["save_dir"]
learning_rate = pa['learning_rate']
architecture = pa['architecture']
hidden_size = pa['hidden_size']
epoch_number = pa['epoch_num']

if not os.path.exists(f'experiments/{save_dir}'):
    os.makedirs(f'experiments/{save_dir}')

file_path = f'experiments/{save_dir}/checkpoint.pt'

# saving parameters
with open(f'experiments/{save_dir}/parameters.json', 'w') as f:
    json.dump(pa, f)

# load the data - data_load() from help.py
print('Loading data')
train_loader, validation_loader, test_loader = hp.load_data(data_path)

criterion = torch.nn.NLLLoss()

# build model
print(f'Loading weights from {architecture}')
model, optimizer = hp.get_model_and_optimizer(pa)

# train model
print('Training model')
hp.train_model(model, optimizer, learning_rate, train_loader, validation_loader, criterion, epoch_number, file_path)

# checkpoint the model
print("model has been successfully trained")
1.835938
2
L3_numpy_pandas_2D/B_NumPy_Axis.py
angelmtenor/IDAFC
0
2752
import numpy as np

# Change False to True for this block of code to see what it does

# NumPy axis argument
if True:
    a = np.array([
        [1, 2, 3],
        [4, 5, 6],
        [7, 8, 9]
    ])

    print(a.sum())
    print(a.sum(axis=0))
    print(a.sum(axis=1))

# Subway ridership for 5 stations on 10 different days
ridership = np.array([
    [0, 0, 2, 5, 0],
    [1478, 3877, 3674, 2328, 2539],
    [1613, 4088, 3991, 6461, 2691],
    [1560, 3392, 3826, 4787, 2613],
    [1608, 4802, 3932, 4477, 2705],
    [1576, 3933, 3909, 4979, 2685],
    [95, 229, 255, 496, 201],
    [2, 0, 1, 27, 0],
    [1438, 3785, 3589, 4174, 2215],
    [1342, 4043, 4009, 4665, 3033]
])


def min_and_max_riders_per_day(ridership):
    """
    Fill in this function. First, for each subway station, calculate the
    mean ridership per day. Then, out of all the subway stations, return the
    maximum and minimum of these values. That is, find the maximum
    mean-ridership-per-day and the minimum mean-ridership-per-day for any
    subway station.
    """
    mean_ridership_per_day = ridership.mean(axis=0)
    max_daily_ridership = mean_ridership_per_day.max()  # Replace this with your code
    min_daily_ridership = mean_ridership_per_day.min()  # Replace this with your code

    return max_daily_ridership, min_daily_ridership


print(min_and_max_riders_per_day(ridership))
2.84375
3
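A quick way to sanity-check the axis argument used in the exercise above: axis=0 collapses rows (one result per station/column), while axis=1 collapses columns (one result per day/row). The small array below is made up purely for illustration.

import numpy as np

sample = np.array([[0, 0, 2],
                   [1478, 3877, 3674],
                   [1613, 4088, 3991]])

print(sample.mean(axis=0))  # column-wise means: one value per station, shape (3,)
print(sample.mean(axis=1))  # row-wise means: one value per day, shape (3,)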
gva/data/validator/__init__.py
gva-jhabte/gva-data
0
2760
""" Schema Validation Tests a dictionary against a schema to test for conformity. Schema definition is similar to - but not the same as - avro schemas Supported Types: - string - a character sequence - format - numeric - a number - min: - max - date - a datetime.date or an iso format date or time - boolean - a boolean or a binary value (true/false, on/off, yes/no) - symbols - other - not one of the above, but a required field - nullable - Python Falsy (None, 0, Empty String, etc) - enum - - symbols Example Schema: { "name": "Table Name", "fields": [ {"name": "id", "type": "string"}, {"name": "country", "type": ["string", "nullable"]}, {"name": "followers", "type": ["string", "nullable"]} ] } Notes: - type(var).__name__ in (set) is faster than isinstance """ import datetime from typing import List, Any, Union, Callable import os import re from ...utils.json import serialize, parse VALID_BOOLEAN_VALUES = ("true", "false", "on", "off", "yes", "no", "0", "1") DEFAULT_MIN = -9223372036854775808 DEFAULT_MAX = 9223372036854775807 class is_string(): __slots__ = ['pattern', 'regex'] def __init__(self, **kwargs): self.regex = None self.pattern = kwargs.get('format') if self.pattern: self.regex = re.compile(self.pattern) def __call__(self, value: Any) -> bool: if self.pattern is None: return type(value).__name__ == "str" else: return self.regex.match(str(value)) def __str__(self): if self.pattern: return f'string ({self.pattern})' else: return 'string' class is_valid_enum(): __slots__ = ['symbols'] def __init__(self, **kwargs): """ -> "type": "enum", "symbols": ["up", "down"] symbols: list of allowed values (case sensitive) """ self.symbols = kwargs.get('symbols', ()) def __call__(self, value: Any) -> bool: return value and value in self.symbols def __str__(self): return f'enum {self.symbols}' class is_boolean(is_valid_enum): def __init__(self, **kwargs): """ is_boolean is a specific case of is_valid_enum - it defaults to a set of true/false values - the check is case insensitive """ super().__init__() if len(self.symbols) == 0: self.symbols = VALID_BOOLEAN_VALUES def __call__(self, value: Any) -> bool: return super().__call__(str(value).lower()) class is_numeric(): __slots__ = ['min', 'max'] def __init__(self, **kwargs): """ -> "type": "numeric", "min": 0, "max": 100 min: low end of valid range max: high end of valid range """ self.min = kwargs.get('min', DEFAULT_MIN) self.max = kwargs.get('max', DEFAULT_MAX) def __call__(self, value: Any) -> bool: try: n = float(value) except (ValueError, TypeError): return False return self.min <= n <= self.max def __str__(self): if self.min == DEFAULT_MIN and self.max == DEFAULT_MAX: return 'numeric' if not self.min == DEFAULT_MIN and not self.max == DEFAULT_MAX: return f'numeric ({self.min} - {self.max})' if not self.min == DEFAULT_MIN and self.max == DEFAULT_MAX: return f'numeric ({self.min} - infinity)' if self.min == DEFAULT_MIN and not self.max == DEFAULT_MAX: return f'numeric (infinity - {self.max})' def is_date(value: Any) -> bool: try: if type(value).__name__ in ("datetime", "date", "time"): return True datetime.datetime.fromisoformat(value) return True except (ValueError, TypeError): return False def is_null(value: Any) -> bool: return not value def other_validator(value: Any) -> bool: return True def is_list(value: Any) -> bool: return type(value).__name__ == 'list' """ Create a dictionary of the validator functions """ SIMPLE_VALIDATORS = { "date": is_date, "nullable": is_null, "other": other_validator, "list": is_list, "array": is_list, } 
COMPLEX_VALIDATORS = { "enum": is_valid_enum, "numeric": is_numeric, "string": is_string, "boolean": is_boolean } def get_validators( type_descriptor: Union[List[str], str], **kwargs): """ For a given type definition (the ["string", "nullable"] bit), return the matching validator functions (the _is_x ones) as a list. """ if not type(type_descriptor).__name__ == 'list': type_descriptor = [type_descriptor] # type:ignore validators: List[Any] = [] for descriptor in type_descriptor: if descriptor in COMPLEX_VALIDATORS: validators.append(COMPLEX_VALIDATORS[descriptor](**kwargs)) else: validators.append(SIMPLE_VALIDATORS[descriptor]) return validators def field_validator(value, validators: set) -> bool: """ Execute a set of validator functions (the _is_x) against a value. Return True if any of the validators are True. """ return any([True for validator in validators if validator(value)]) class Schema(): def __init__(self, definition: Union[dict, str]): """ Compile a validator for a given schema. paramaters: - definition: a dictionary, text representation of a dictionary (JSON) or a JSON file containing a schema definition """ # if we have a schema as a string, load it into a dictionary if type(definition).__name__ == 'str': if os.path.exists(definition): # type:ignore definition = parse(open(definition, mode='r').read()) # type:ignore else: definition = parse(definition) # type:ignore try: # read the schema and look up the validators self._validators = { item.get('name'): get_validators( item['type'], symbols=item.get('symbols'), min=item.get('min', DEFAULT_MIN), # 64bit signed (not a limit, just a default) max=item.get('max', DEFAULT_MAX), # 64bit signed (not a limit, just a default) format=item.get('format')) for item in definition.get('fields', []) #type:ignore } except KeyError: raise ValueError("Invalid type specified in schema - valid types are: string, numeric, date, boolean, nullable, list, enum") if len(self._validators) == 0: raise ValueError("Invalid schema specification") def validate(self, subject: dict = {}, raise_exception=False) -> bool: result = True self.last_error = '' for key, value in self._validators.items(): if not field_validator(subject.get(key), self._validators.get(key, [other_validator])): result = False for v in value: self.last_error += f"'{key}' ({subject.get(key)}) did not pass validator {str(v)}.\n" if raise_exception and not result: raise ValueError(F"Record does not conform to schema - {self.last_error}. ") return result def __call__(self, subject: dict = {}, raise_exception=False) -> bool: # wrap the validate function return self.validate(subject=subject, raise_exception=raise_exception)
2.6875
3
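A minimal usage sketch for the Schema validator above, assembled from the example schema in its own module docstring. It assumes the gva package is importable; the field names and values are made up for illustration.

from gva.data.validator import Schema  # assumes the gva package is installed

schema = Schema({
    "name": "Table Name",
    "fields": [
        {"name": "id", "type": "string"},
        {"name": "country", "type": ["string", "nullable"]},
        {"name": "followers", "type": ["numeric", "nullable"], "min": 0}
    ]
})

print(schema({"id": "abc", "country": None, "followers": 12}))  # True
print(schema({"id": 123, "country": "GB", "followers": -1}))    # False
print(schema.last_error)                                        # explains the failed fields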
tools/test_net.py
by-liu/SegLossBia
18
2776
import sys import logging from seglossbias.utils import mkdir, setup_logging from seglossbias.engine import default_argument_parser, load_config, DefaultTester logger = logging.getLogger(__name__) def setup(args): cfg = load_config(args) mkdir(cfg.OUTPUT_DIR) setup_logging(output_dir=cfg.OUTPUT_DIR) return cfg def main(): args = default_argument_parser().parse_args() cfg = setup(args) logger.info("Launch command : ") logger.info(" ".join(sys.argv)) tester = DefaultTester(cfg) tester.test() if __name__ == "__main__": main()
1.25
1
algorithms/521. Longest Uncommon Subsequence I.py
woozway/py3-leetcode
1
2792
class Solution:
    def findLUSlength(self, a: str, b: str) -> int:
        return -1 if a == b else max(len(a), len(b))
1.632813
2
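A short check of the one-liner above, reusing the Solution class from that snippet: two equal strings have no uncommon subsequence (-1); otherwise the longer string itself is the answer.

s = Solution()
print(s.findLUSlength("aba", "cdc"))  # 3
print(s.findLUSlength("aaa", "aaa"))  # -1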
6.爬取豆瓣排行榜电影数据(含GUI界面版)/main.py
shengqiangzhang/examples-of-web-crawlers
12,023
2800
# -*- coding:utf-8 -*-

from uiObject import uiObject

# main entry point
if __name__ == '__main__':
    ui = uiObject()
    ui.ui_process()
0.914063
1
python/OpenGeoTile.py
scoofy/open-geotiling
0
2816
from openlocationcode import openlocationcode as olc from enum import Enum import math, re class TileSize(Enum): ''' An area of 20° x 20°. The side length of this tile varies with its location on the globe, but can be up to approximately 2200km. Tile addresses will be 2 characters long.''' GLOBAL = (2, 20.0) ''' An area of 1° x 1°. The side length of this tile varies with its location on the globe, but can be up to approximately 110km. Tile addresses will be 4 characters long.''' REGION = (4, 1.0) ''' An area of 0.05° x 0.05°. The side length of this tile varies with its location on the globe, but can be up to approximately 5.5km. Tile addresses will be 6 characters long.''' DISTRICT = (6, 0.05) ''' An area of 0.0025° x 0.0025°. The side length of this tile varies with its location on the globe, but can be up to approximately 275m. Tile addresses will be 8 characters long.''' NEIGHBORHOOD = (8, 0.0025) ''' An area of 0.000125° x 0.000125°. The side length of this tile varies with its location on the globe, but can be up to approximately 14m. Tile addresses will be 10 characters long.''' PINPOINT = (10, 0.000125) def __init__(self, code_length, coordinate_increment): self.code_length = code_length self.coordinate_increment = coordinate_increment def getCodeLength(self): '''get 0th value''' return self.code_length def getCoordinateIncrement(self): '''get 1th value''' return self.coordinate_increment # Copy from OpenLocationCode.java # A separator used to break the code into two parts to aid memorability. SEPARATOR = '+' # Copy from OpenLocationCode.java # The character used to pad codes. PADDING_CHARACTER = '0' PADDING_2 = "00" PADDING_4 = "0000" PADDING_6 = "000000" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET = {x+y for x in CODE_ALPHABET for y in CODE_ALPHABET} BASE_20_BORDER_SET = {x for x in BASE_20_SET if x[0] in ['2', 'X'] or x[1] in ['2', 'X']} NORTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == 'X'} EAST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == 'X'} SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == '2'} WEST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == '2'} memoized_digit_dict = { "N1": NORTH_DIGITS, "E1": EAST_DIGITS, "S1": SOUTH_DIGITS, "W1": WEST_DIGITS, } def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code if not is_tile_address(code): code = code.replace(SEPARATOR, '') if is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception("OLC padding larger than allowed by desired_tile_size") code_address = code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code = code_address + ("0" * (full_length - len(code_address))) if desired_tile_size == TileSize.PINPOINT: code = code[:-2] + SEPARATOR + code[-2:] else: code = code[:-2] + SEPARATOR return code def return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size ''' return None return {address+base for address in set_of_addresses for base in BASE_20_SET} class OpenGeoTile(): ''' /** * A wrapper around an {@code OpenLocationCode} object, focusing on the area identified by a prefix * of the given OpenLocationCode. 
* * Using this wrapper class allows to determine whether two locations are in the same or adjacent * "tiles", to determine all neighboring tiles of a given one, to calculate a distance in tiles etc. * * Open Location Code is a technology developed by Google and licensed under the Apache License 2.0. * For more information, see https://github.com/google/open-location-code * * @author <NAME> * @version 0.1.0 */ Ported by scoofy on 08.31.21 ''' def __init__(self, code=None, tile_size=None, lat=None, long=None, ): if not (code or (code and tile_size) or (lat and long)): raise Exception("Invalid OpenGeoTile constructor arguments") if lat and long: self.constructTileFromLatLong(lat, long, tile_size) elif code and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code: if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, "")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/** * Creates a new OpenGeoTile from an existing * {@link com.google.openlocationcode.OpenLocationCode}. * @param olc OpenLocationCode for the current location. This can be a padded code, in which * case the resulting OpenGeoTile will have a larger TileSize. * @throws IllegalArgumentException if olc is not a full code */''' if not olc.isFull(plus_code): raise Exception("Only full OLC supported. Use olc.recoverNearest().") self.code = plus_code.upper() if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1, 10) if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else: raise Exception("Too precise, sort this later") def constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates a new OpenGeoTile from an existing {@link com.google.openlocationcode.OpenLocationCode}. @param olc OpenLocationCode for the current location @param tile_size tile size to use for this OpenGeoTile @throws IllegalArgumentException when trying to pass a short (non-full) OLC, or if OLC has too much padding for given tile_size ''' if not olc.isFull(plus_code): raise Exception("Only full OLC supported. Use recover().") modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper() self.tile_size = tile_size def constructTileFromLatLong(self, lat: float, long: float, tile_size=None): '''/** * Creates a new OpenGeoTile from lat/long coordinates. * @param latitude latitude of the location * @param longitude longitude of the location * @param tile_size tile size to use for this OpenGeoTile * @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(double, double, int)} */''' if not tile_size: tile_size = TileSize.PINPOINT self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size = tile_size def constructTileFromTileAddress(self, tileAddress): '''/** * Creates a new OpenGeoTile from a tile address. 
* @param tileAddress a tile address is a [2/4/6/8/10]-character string that corresponds to a * valid {@link com.google.openlocationcode.OpenLocationCode} after removing * '+' and an additional number of trailing characters; tile size is * determined by the length of this address * @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is of * invalid length */''' detectedTileSize = None olcBuilder = "" if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder += tileAddress + PADDING_6 + SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder += tileAddress + PADDING_4 + SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder += tileAddress + PADDING_2 + SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder += tileAddress + SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder += tileAddress[0:8] + SEPARATOR + tileAddress[8:10] if detectedTileSize == None: print(tileAddress) raise Exception("Invalid tile address") self.tile_size = detectedTileSize self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self): # this code is effectively redundant as python has no wrapping '''/** * The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile. * For the plus code of the whole tile, see {@link #getTileOpenLocationCode()}. * @return the exact plus code wrapped by this OpenGeoTile */''' return self.code def returnCode(self): return self.code def getTileSize(self): '''/** * Get the {@link TileSize} of this OpenGeoTile. * @return the {@link TileSize} of this OpenGeoTile */''' return self.tile_size def getTileAddress(self): '''/** * A tile address is a string of length 2, 4, 6, 8, or 10, which corresponds to a valid * {@link com.google.openlocationcode.OpenLocationCode} after padding with an appropriate * number of '0' and '+' characters. Example: Address "CVXW" corresponds to OLC "CVXW0000+" * @return the tile address of this OpenGeoTile; */''' return self.tile_address def getTileAddressPrefix(self): '''/** * The prefix of a tile address is the address of the next biggest tile at this location. * @return this tile's address with the final two characters removed. In case of a GLOBAL tile, * returns the empty string. */''' if self.tile_size == TileSize.GLOBAL: return "" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this code is redundant '''/** * The full {@link com.google.openlocationcode.OpenLocationCode} for this tile. Other than * {@link #getWrappedOpenLocationCode()}, this will return a full plus code for the whole tile. * @return a plus code for the whole tile, probably padded with '0' characters */''' return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/** * Get an array of the typically 8 neighboring tiles of the same size. * @return an array of the typically 8 neighboring tiles of the same size; * may return less than 8 neighbors for tiles near the poles. 
*/''' # deltas = [20.0, 1.0, 0.05, 0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude = code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list included to keep ordered data''' directions_list = ["NW", "N", "NE", "E", "SE", "S", "SW", "W"] direction_dict = { "NW": [+1, -1], "N": [+1, 0], "NE": [+1, +1], "W": [ 0, -1], "E": [ 0, +1], "SW": [-1, -1], "S": [-1, 0], "SE": [-1, +1], } #lat_diff = [+1, +1, +1, 0, -1, -1, -1, 0] #long_diff = [-1, 0, +1, +1, +1, 0, -1, -1] if not type(eight_point_direction) in [type(None), list, str]: raise Exception("eight_point_direction must be of type list or str") if eight_point_direction is None: directions = directions_list elif isinstance(eight_point_direction, str): directions = [] if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else: ''' this list construction keeps directions in the order above ''' uppercase_input_directions = [d.upper() for d in eight_point_direction] directions = [direction for direction in directions_list if direction in uppercase_input_directions] neighbors = set() for direction in directions: lat_diff, long_diff = direction_dict.get(direction) ''' //OLC constructor clips and normalizes, //so we don't have to deal with invalid lat/long values directly''' neighborLatitude = latitude + (delta * lat_diff) neighborLongitude = longitude + (delta * long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't add tiles that are the same as this one due to clipping near the poles''' neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self, potentialSameTile): '''/** * Check if a tile describes the same area as this one. * @param potentialSameTile the OpenGeoTile to check * @return true if tile sizes and addresses are the same; false if not */''' if potentialSameTile.getTileSize() != self.getTileSize(): return False return potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/** * Check if a tile is neighboring this one. * @param potentialNeighbor the OpenGeoTile to check * @return true if this and potentialNeighbor are adjacent (8-neighborhood); * false if not */''' if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating over neighbors for same tile''' if self.isSameTile(potentialNeighbor): return False neighbors = self.getNeighbors() for neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor): return True return False else: '''//tiles of different size are adjacent if at least one neighbor of the smaller tile, //but not the smaller tile itself, is contained within the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile = self else: smallerTile = self biggerTile = potentialNeighbor if biggerTile.contains(smallerTile): return False neighbors = smallerTile.getNeighbors() for neighbor in neighbors: if biggerTile.contains(neighbor): return True return False def contains(self, potentialMember): '''/** * Check if this tile contains another one. 
* @param potentialMember the OpenGeoTile to check * @return true if the area potentialMember falls within the area of this tile, including cases * where both are the same; false if not */''' # //if A contains B, then B's address has A's address as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates the Manhattan (city block) distance between this and another tile of the same size. * @param otherTile another tile of the same size as this one * @return an integer value corresponding to the number of tiles of the given size that need to * be traversed getting from one to the other tile * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates the Chebyshev (chessboard) distance between this and another tile of the same size. * @param otherTile another tile of the same size as this one * @return an integer value corresponding to the number of tiles of the given size that need to * be traversed getting from one to the other tile * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile): '''/** * Returns the approximate direction of the other tile relative to this. The return value can * have a large margin of error, especially for big or far away tiles, so this should only be * interpreted as a very rough approximation and used as such. 
* @param otherTile another tile of the same size as this one * @return an angle in radians, 0 being an eastward direction, +/- PI being westward direction * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's direction, to assist in expanding tile areas ''' if not self.isNeighbor(neighborTile): raise Exception("neighborTile must be neighbor") if neighborTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") self_tile_x = self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction = "" north_south = None if self_tile_x != other_tile_x: ''' one tile is above the other ''' if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_x) == 0: ''' other tile is above -> neighborTile is north ''' direction = direction + 'N' else: direction = direction + 'S' else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other tile is above -> neighborTile is north ''' direction = direction + 'N' else: ''' other tile is below -> neighborTile is south ''' direction = direction + 'S' if self_tile_y != other_tile_y: ''' one tile is above the other ''' if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_y) == 0: ''' other tile is right -> neighborTile is east ''' direction = direction + 'E' else: ''' other tile is left -> neighborTile is west ''' direction = direction + 'W' else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other tile is right -> neighborTile is east ''' direction = direction + 'E' else: ''' other tile is left -> neighborTile is west ''' direction = direction + 'W' return direction def getCharacterIndex(self, c): '''//following definitions copied from OpenLocationCode.java''' index = "23456789CFGHJMPQRVWX".find(c.upper()) if index == -1: raise Exception("Character does not exist in alphabet") return index def characterDistance(self, c1, c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") numIterations = self.tile_size.getCodeLength()/2 #; //1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2 + 1] c2 = otherTile.getTileAddress()[i*2 + 1] if i == 0: '''//for the first 
longitudinal value, we need to take care of wrapping - basically, //if it's shorter to go the other way around, do so''' firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18 #; //360°/20° = 18 if abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff > 0: firstDiff -= NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED tileDistance += firstDiff else: tileDistance += self.characterDistance(c1, c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' tile is desired size ''' return self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception("OLC padding larger than allowed by desired_tile_size") iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for i in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for address in address_set} return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size ''' return None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception("OLC padding larger than allowed by desired_tile_size") iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set = set() east_set = set() south_set = set() west_set = set() if isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses = set() if eight_point_direction is None: ''' all borders ''' ''' traveling salesman problem ''' ''' let's do it once, and try to reduce by swaping digits ''' all_border_set = memoized_digit_dict.get(f"A{iterations_needed}") if not all_border_set: north_base_set = memoized_digit_dict.get(f"N{iterations_needed}") if not north_base_set: self.memoizeDigitDict("N", iterations_needed) north_set = memoized_digit_dict.get(f"N{iterations_needed}") east_set = memoized_digit_dict.get(f"E{iterations_needed}", set()) south_set = memoized_digit_dict.get(f"S{iterations_needed}", set()) west_set = memoized_digit_dict.get(f"W{iterations_needed}", set()) east_exists = east_set != set() south_exists = south_set != set() west_exists = west_set != set() for base in north_set: east_base = "" south_base = "" west_base = "" base_tuple_list = re.findall('..', base) ''' north will be Xd east dX south 2d west d2''' for n_tuple in base_tuple_list: relevant_digit = n_tuple[1] if not east_exists: east_base += relevant_digit + "X" if not south_exists: south_base += "2" + relevant_digit if not west_exists: west_base += relevant_digit + "2" if not east_exists: east_set.add(east_base) if not south_exists: south_set.add(south_base) if not west_exists: west_set.add(west_base) memoized_digit_dict[f"E{iterations_needed}"] = east_set memoized_digit_dict[f"S{iterations_needed}"] = south_set memoized_digit_dict[f"W{iterations_needed}"] = west_set all_border_set = north_set | east_set | south_set | west_set memoized_digit_dict[f"A{iterations_needed}"] = all_border_set return {OpenGeoTile(address+base) for base in all_border_set} elif len(eight_point_direction) == 1: ''' North, South, East, or West ''' 
base_set = memoized_digit_dict.get(f"{eight_point_direction}{iterations_needed}") if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base) for base in base_set} elif len(eight_point_direction) == 2: ''' NW, NE, SW, SE... should return only one tile''' ordinal_digit_dict = { 'NW': 'X2', 'NE': 'XX', 'SE': '2X', 'SW': '22' } base = '' for i in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f"{eight_point_direction}{iterations_needed}") if not base_set: quickest_i = 0 for i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f"{eight_point_direction}{i + 1}"): quickest_i = i break for i in range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f"{eight_point_direction}{i + 1}") next_set = {existing_base + base for existing_base in existing_bases for base in memoized_digit_dict.get(f"{eight_point_direction}1")} memoized_digit_dict[f"{eight_point_direction}{i + 2}"] = next_set
2.390625
2
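A minimal usage sketch for the OpenGeoTile wrapper above. It assumes the module file is importable as OpenGeoTile and that the openlocationcode package it imports is installed; the coordinates are arbitrary illustration values.

from OpenGeoTile import OpenGeoTile, TileSize  # assumes the module is on the path

tile = OpenGeoTile(lat=52.52, long=13.405, tile_size=TileSize.DISTRICT)
print(tile.getTileAddress())            # the 6-character DISTRICT-level address
for neighbor in tile.getNeighbors():
    print(neighbor.getTileAddress())    # the up-to-8 adjacent DISTRICT tiles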
spot/level1.py
K0gata/SGLI_Python_output_tool
1
2824
import numpy as np import logging from decimal import Decimal, ROUND_HALF_UP from abc import ABC, abstractmethod, abstractproperty from spot.utility import bilin_2d from spot.config import PROJ_TYPE # ============================= # Level-1 template class # ============================= class L1Interface(ABC): @property @abstractmethod def PROJECTION_TYPE(self): raise NotImplementedError() @property @abstractmethod def ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def __init__(self, h5_file, product_id): self.h5_file = h5_file self.product_id = product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str): dset = self.h5_file['Image_data/' + prod_name] # Return uint16 type data if the product is QA_flag or Line_tai93 if 'QA_flag' == prod_name or 'Line_tai93' == prod_name: return dset[:] # Validate data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_DN'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') if 'Maximum_valid_DN' in dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN if 'Minimum_valid_DN' in dset.attrs: data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN # Convert DN to physical value data = data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] return data @abstractmethod def get_geometry_data(self, data_name:str, **kwargs): raise NotImplementedError() @abstractmethod def get_geometry_data_list(self): raise NotImplementedError() def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name: str): if 'Rt_' in prod_name: return 'NA' # Get attrs set unit_name = 'Unit' attrs = self.h5_file['/Image_data/' + prod_name].attrs # Get unit if unit_name not in attrs: return 'NA' return attrs[unit_name][0].decode('UTF-8') # ============================= # Level-1 map-projection class # ============================= class Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self, h5_file, product_id): super().__init__(h5_file, product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self, data_name: str, **kwargs): interval = kwargs['interval'] dset = self.h5_file['Geometry_data/' + data_name] data = dset[:] if 'Latitude' is not data_name and 'Longitude' is not data_name: data = data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] # Finish if interval is none if interval is None or interval == 'none': return data # Interpolate raw data if interval == 'auto': interp_interval = dset.attrs['Resampling_interval'][0] else: interp_interval = interval lon_mode = False if 'Longitude' == data_name: lon_mode = True if interp_interval > 1: data = bilin_2d(data, interp_interval, lon_mode) # Trim away the excess pixel/line (data_size_lin, data_size_pxl) = data.shape if (kwargs['fit_img_size'] is True) and (self.img_n_lin <= 
data_size_lin) and (self.img_n_pix <= data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix] return data def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # ============================= # Level-1 sub-processing level class # ============================= class L1B(Scene): # ----------------------------- # Public # ----------------------------- def get_product_data(self, prod_name:str): if 'Land_water_flag' in prod_name: return self._get_land_water_flag() if 'Lt_' in prod_name: return self._get_Lt(prod_name) if 'Rt_' in prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # ----------------------------- # Private # ----------------------------- def _get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_value'][0]] = np.NaN return data def _get_Lt(self, prod_name): dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data def _get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data = data / cos_theta_0 return data def _get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] data = np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']] = 0 return data > 0 class VNRL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list class IRSL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list # EOF
1.515625
2
src/compas_plotters/artists/lineartist.py
XingxinHE/compas
0
2832
from compas_plotters.artists import Artist from matplotlib.lines import Line2D from compas.geometry import intersection_line_box_xy __all__ = ['LineArtist'] class LineArtist(Artist): """""" zorder = 1000 def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0, 0)): super(LineArtist, self).__init__(line) self._mpl_line = None self._start_artist = None self._end_artist = None self._segment_artist = None self._draw_points = draw_points self._draw_as_segment = draw_as_segment self.line = line self.linewidth = linewidth self.linestyle = linestyle self.color = color def clip(self): xlim, ylim = self.plotter.viewbox xmin, xmax = xlim ymin, ymax = ylim box = [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]] return intersection_line_box_xy(self.line, box) @property def data(self): return [self.line.start[:2], self.line.end[:2]] def draw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else: points = self.clip() if points: p0, p1 = points x0, y0 = p0[:2] x1, y1 = p1[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip() if points: p0, p1 = points x0, y0 = p0[:2] x1, y1 = p1[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth)
2.015625
2
validator/delphi_validator/run.py
benjaminysmith/covidcast-indicators
0
2840
# -*- coding: utf-8 -*-
"""Functions to call when running the tool.

This module should contain a function called `run_module`, that is executed
when the module is run with `python -m delphi_validator`.
"""
from delphi_utils import read_params
from .validate import Validator


def run_module():
    """Run the validator as a module."""
    parent_params = read_params()
    params = parent_params['validation']

    validator = Validator(params)
    validator.validate(parent_params["export_dir"]).print_and_exit()
1.414063
1
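read_params() in the snippet above loads a project-specific params file whose exact layout is not shown here. Purely as an assumption, the nested structure implied by the code (a "validation" block plus a top-level export_dir) would look roughly like this:

params = {
    "export_dir": "./receiving",   # hypothetical path, consumed via parent_params["export_dir"]
    "validation": {
        # validator-specific settings passed to Validator(params); contents are project-defined
    },
}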
web/repositories.bzl
Ubehebe/rules_webtesting
0
2856
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Defines external repositories needed by rules_webtesting.""" load("//web/internal:platform_http_file.bzl", "platform_http_file") load("@bazel_gazelle//:deps.bzl", "go_repository") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") load("@bazel_tools//tools/build_defs/repo:java.bzl", "java_import_external") # NOTE: URLs are mirrored by an asynchronous review process. They must # be greppable for that to happen. It's OK to submit broken mirror # URLs, so long as they're correctly formatted. Bazel's downloader # has fast failover. def web_test_repositories(**kwargs): """Defines external repositories required by Webtesting Rules. This function exists for other Bazel projects to call from their WORKSPACE file when depending on rules_webtesting using http_archive. This function makes it easy to import these transitive dependencies into the parent workspace. This will check to see if a repository has been previously defined before defining a new repository. Alternatively, individual dependencies may be excluded with an "omit_" + name parameter. This is useful for users who want to be rigorous about declaring their own direct dependencies, or when another Bazel project is depended upon (e.g. rules_closure) that defines the same dependencies as this one (e.g. com_google_guava.) Alternatively, a whitelist model may be used by calling the individual functions this method references. Please note that while these dependencies are defined, they are not actually downloaded, unless a target is built that depends on them. Args: **kwargs: omit_... parameters used to prevent importing specific dependencies. 
""" if should_create_repository("bazel_skylib", kwargs): bazel_skylib() if should_create_repository("com_github_blang_semver", kwargs): com_github_blang_semver() if should_create_repository("com_github_gorilla_context", kwargs): com_github_gorilla_context() if should_create_repository("com_github_gorilla_mux", kwargs): com_github_gorilla_mux() if should_create_repository("com_github_tebeka_selenium", kwargs): com_github_tebeka_selenium() if should_create_repository("com_github_urllib3", kwargs): com_github_urllib3() if should_create_repository("com_google_code_findbugs_jsr305", kwargs): com_google_code_findbugs_jsr305() if should_create_repository("com_google_code_gson", kwargs): com_google_code_gson() if should_create_repository( "com_google_errorprone_error_prone_annotations", kwargs, ): com_google_errorprone_error_prone_annotations() if should_create_repository("com_google_guava", kwargs): com_google_guava() if should_create_repository("com_squareup_okhttp3_okhttp", kwargs): com_squareup_okhttp3_okhttp() if should_create_repository("com_squareup_okio", kwargs): com_squareup_okio() if should_create_repository("commons_codec", kwargs): commons_codec() if should_create_repository("commons_logging", kwargs): commons_logging() if should_create_repository("junit", kwargs): junit() if should_create_repository("net_bytebuddy", kwargs): net_bytebuddy() if should_create_repository("org_apache_commons_exec", kwargs): org_apache_commons_exec() if should_create_repository("org_apache_httpcomponents_httpclient", kwargs): org_apache_httpcomponents_httpclient() if should_create_repository("org_apache_httpcomponents_httpcore", kwargs): org_apache_httpcomponents_httpcore() if should_create_repository("org_hamcrest_core", kwargs): org_hamcrest_core() if should_create_repository("org_jetbrains_kotlin_stdlib", kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository("org_json", kwargs): org_json() if should_create_repository("org_seleniumhq_py", kwargs): org_seleniumhq_py() if should_create_repository("org_seleniumhq_selenium_api", kwargs): org_seleniumhq_selenium_api() if should_create_repository("org_seleniumhq_selenium_remote_driver", kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print("The following parameters are unknown: " + str(kwargs.keys())) def should_create_repository(name, args): """Returns whether the name repository should be created. This allows creation of a repository to be disabled by either an "omit_" _+ name parameter or by previously defining a rule for the repository. The args dict will be mutated to remove "omit_" + name. Args: name: The name of the repository that should be checked. args: A dictionary that contains "omit_...": bool pairs. Returns: boolean indicating whether the repository should be created. """ key = "omit_" + name if key in args: val = args.pop(key) if val: return False if native.existing_rule(name): return False return True def browser_repositories(firefox = False, chromium = False, sauce = False): """Sets up repositories for browsers defined in //browsers/.... This should only be used on an experimental basis; projects should define their own browsers. Args: firefox: Configure repositories for //browsers:firefox-native. chromium: Configure repositories for //browsers:chromium-native. sauce: Configure repositories for //browser/sauce:chrome-win10. 
""" if chromium: org_chromium_chromedriver() org_chromium_chromium() if firefox: org_mozilla_firefox() org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect() def bazel_skylib(): http_archive( name = "bazel_skylib", sha256 = "", strip_prefix = "bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a", urls = [ "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz", "https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz", ], ) def com_github_blang_semver(): go_repository( name = "com_github_blang_semver", importpath = "github.com/blang/semver", sha256 = "3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57", strip_prefix = "semver-3.5.1", urls = [ "https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz", "https://github.com/blang/semver/archive/v3.5.1.tar.gz", ], ) def com_github_gorilla_context(): go_repository( name = "com_github_gorilla_context", importpath = "github.com/gorilla/context", sha256 = "2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03", strip_prefix = "context-1.1.1", urls = [ "https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz", "https://github.com/gorilla/context/archive/v1.1.1.tar.gz", ], ) def com_github_gorilla_mux(): go_repository( name = "com_github_gorilla_mux", importpath = "github.com/gorilla/mux", sha256 = "0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3", strip_prefix = "mux-1.6.2", urls = [ "https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz", "https://github.com/gorilla/mux/archive/v1.6.2.tar.gz", ], ) def com_github_tebeka_selenium(): go_repository( name = "com_github_tebeka_selenium", importpath = "github.com/tebeka/selenium", sha256 = "c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd", strip_prefix = "selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a", urls = [ "https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz", "https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz", ], ) def com_github_urllib3(): http_archive( name = "com_github_urllib3", build_file = str(Label("//build_files:com_github_urllib3.BUILD")), sha256 = "a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", strip_prefix = "urllib3-1.23", urls = [ "https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz", ], ) def com_google_code_findbugs_jsr305(): java_import_external( name = "com_google_code_findbugs_jsr305", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar", "https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar", ], jar_sha256 = "766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7", licenses = ["notice"], # BSD 3-clause ) def com_google_code_gson(): java_import_external( name = "com_google_code_gson", jar_sha256 = "233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar", "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 ) def com_google_errorprone_error_prone_annotations(): java_import_external( name = 
"com_google_errorprone_error_prone_annotations", jar_sha256 = "10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar", "https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar", ], licenses = ["notice"], # Apache 2.0 ) def com_google_guava(): java_import_external( name = "com_google_guava", jar_sha256 = "a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar", "https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar", ], licenses = ["notice"], # Apache 2.0 exports = [ "@com_google_code_findbugs_jsr305", "@com_google_errorprone_error_prone_annotations", ], ) def com_saucelabs_sauce_connect(): platform_http_file( name = "com_saucelabs_sauce_connect", licenses = ["by_exception_only"], # SauceLabs EULA amd64_sha256 = "dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52", amd64_urls = [ "https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz", ], macos_sha256 = "920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66", macos_urls = [ "https://saucelabs.com/downloads/sc-4.5.1-osx.zip", ], windows_sha256 = "ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5", windows_urls = [ "https://saucelabs.com/downloads/sc-4.4.12-win32.zip", ], ) def com_squareup_okhttp3_okhttp(): java_import_external( name = "com_squareup_okhttp3_okhttp", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar", "https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar", ], jar_sha256 = "a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e", licenses = ["notice"], # Apache 2.0 deps = [ "@com_squareup_okio", "@com_google_code_findbugs_jsr305", ], ) def com_squareup_okio(): java_import_external( name = "com_squareup_okio", jar_sha256 = "79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar", "https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar", ], licenses = ["notice"], # Apache 2.0 deps = [ "@com_google_code_findbugs_jsr305", "@org_jetbrains_kotlin_stdlib", ], ) def commons_codec(): java_import_external( name = "commons_codec", jar_sha256 = "e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar", "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar", ], licenses = ["notice"], # Apache License, Version 2.0 ) def commons_logging(): java_import_external( name = "commons_logging", jar_sha256 = "daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar", "https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 ) def junit(): java_import_external( name = "junit", jar_sha256 = "59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a", jar_urls = [ 
"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", ], licenses = ["reciprocal"], # Eclipse Public License 1.0 testonly_ = 1, deps = ["@org_hamcrest_core"], ) def net_bytebuddy(): java_import_external( name = "net_bytebuddy", jar_sha256 = "4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar", "https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar", ], licenses = ["notice"], # Apache 2.0 deps = ["@com_google_code_findbugs_jsr305"], ) def org_apache_commons_exec(): java_import_external( name = "org_apache_commons_exec", jar_sha256 = "cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar", "https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar", ], licenses = ["notice"], # Apache License, Version 2.0 ) def org_apache_httpcomponents_httpclient(): java_import_external( name = "org_apache_httpcomponents_httpclient", jar_sha256 = "c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar", "https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar", ], licenses = ["notice"], # Apache License, Version 2.0 deps = [ "@org_apache_httpcomponents_httpcore", "@commons_logging", "@commons_codec", ], ) def org_apache_httpcomponents_httpcore(): java_import_external( name = "org_apache_httpcomponents_httpcore", jar_sha256 = "1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar", "https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar", ], licenses = ["notice"], # Apache License, Version 2.0 ) def org_chromium_chromedriver(): platform_http_file( name = "org_chromium_chromedriver", licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT amd64_sha256 = "71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7", amd64_urls = [ "https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip", ], macos_sha256 = "fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae", macos_urls = [ "https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip", ], windows_sha256 = "a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e", windows_urls = [ "https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip", ], ) def org_chromium_chromium(): platform_http_file( name = "org_chromium_chromium", licenses = ["notice"], # BSD 3-clause (maybe more?) 
amd64_sha256 = "6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8", amd64_urls = [ "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip", ], macos_sha256 = "084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32", macos_urls = [ "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip", ], windows_sha256 = "d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c", windows_urls = [ "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip", ], ) def org_hamcrest_core(): java_import_external( name = "org_hamcrest_core", jar_sha256 = "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar", "https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar", ], licenses = ["notice"], # New BSD License testonly_ = 1, ) def org_jetbrains_kotlin_stdlib(): java_import_external( name = "org_jetbrains_kotlin_stdlib", jar_sha256 = "62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar", "https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 ) def org_json(): java_import_external( name = "org_json", jar_sha256 = "518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar", "https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar", ], licenses = ["notice"], # MIT-style license ) def org_mozilla_firefox(): platform_http_file( name = "org_mozilla_firefox", licenses = ["reciprocal"], # MPL 2.0 amd64_sha256 = "3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de", amd64_urls = [ "https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2", "https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2", ], macos_sha256 = "bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5", macos_urls = [ "https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg", "https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg", ], ) def org_mozilla_geckodriver(): platform_http_file( name = "org_mozilla_geckodriver", licenses = ["reciprocal"], # MPL 2.0 amd64_sha256 = "c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6", amd64_urls = [ "https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz", "https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz", ], macos_sha256 = "ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce", macos_urls = [ "https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz", "https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz", ], ) def org_seleniumhq_py(): http_archive( name = "org_seleniumhq_py", build_file = str(Label("//build_files:org_seleniumhq_py.BUILD")), sha256 = 
"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40", strip_prefix = "selenium-3.14.0", urls = [ "https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz", ], ) def org_seleniumhq_selenium_api(): java_import_external( name = "org_seleniumhq_selenium_api", jar_sha256 = "1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar", "https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 testonly_ = 1, ) def org_seleniumhq_selenium_remote_driver(): java_import_external( name = "org_seleniumhq_selenium_remote_driver", jar_sha256 = "284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar", "https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 testonly_ = 1, deps = [ "@com_google_code_gson", "@com_google_guava", "@net_bytebuddy", "@com_squareup_okhttp3_okhttp", "@com_squareup_okio", "@commons_codec", "@commons_logging", "@org_apache_commons_exec", "@org_apache_httpcomponents_httpclient", "@org_apache_httpcomponents_httpcore", "@org_seleniumhq_selenium_api", ], )
1.382813
1
Server.py
dipghoshraj/live-video-streming-with-web-socket
3
2872
import cv2
import io
import socket
import struct
import time
import pickle
import zlib

client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 8485))
connection = client_socket.makefile('wb')

cam = cv2.VideoCapture("E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4")
cam.set(3, 320)   # frame width
cam.set(4, 240)   # frame height

img_counter = 0
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]

while True:
    ret, frame = cam.read()
    if not ret:
        # End of the video file (or read error): stop streaming.
        break
    result, frame = cv2.imencode('.jpg', frame, encode_param)
    # data = zlib.compress(pickle.dumps(frame, 0))
    data = pickle.dumps(frame, 0)
    size = len(data)

    print("{}: {}".format(img_counter, size))
    # Length-prefixed message: 4-byte big-endian size followed by the pickled JPEG buffer.
    client_socket.sendall(struct.pack(">L", size) + data)
    img_counter += 1

cam.release()
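The loop above streams each frame as a 4-byte big-endian length header followed by a pickled JPEG buffer. A hedged sketch of the matching receive side is shown below; the host, port, and window name mirror the sender but are illustrative, and disconnect handling is omitted for brevity.

# Receive side sketch for the length-prefixed protocol used above (assumed, not part of this repo).
import cv2
import pickle
import socket
import struct

server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.bind(('127.0.0.1', 8485))
server_socket.listen(1)
conn, _ = server_socket.accept()

payload_size = struct.calcsize(">L")   # 4 bytes
data = b""
while True:
    # Accumulate bytes until the 4-byte size header is complete.
    while len(data) < payload_size:
        data += conn.recv(4096)
    msg_size = struct.unpack(">L", data[:payload_size])[0]
    data = data[payload_size:]
    # Accumulate bytes until the whole frame payload has arrived.
    while len(data) < msg_size:
        data += conn.recv(4096)
    frame_data, data = data[:msg_size], data[msg_size:]
    # Unpickle the encoded JPEG buffer and decode it back into an image.
    frame = cv2.imdecode(pickle.loads(frame_data), cv2.IMREAD_COLOR)
    cv2.imshow('stream', frame)
    if cv2.waitKey(1) == ord('q'):
        break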
1.625
2
logger.py
bekaaa/xgboost_tuner
0
2880
#! /usr/bin/env python
import logging
import os
# ---------------------------------------


class logger:
    '''
    A ready to use logging class.
    All you need to do is create an object with the parameters (log_filename, directory to save it),
    then whenever you want to add text, call obj.add("some text").
    The function obj.close() is not important; I just added it for completeness.
    You can edit any of the configuration below to whatever you like.
    '''

    def __init__(self, filename, log_dir='../data/log'):
        self.handler = None
        # The filename must be a non-empty string.
        assert type(filename) == str and filename != ''
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.INFO)
        filename = os.path.join(log_dir, str(filename))
        self.handler = logging.FileHandler(filename)
        self.handler.setLevel(logging.INFO)
        formatter = logging.Formatter(
            fmt='%(asctime)s : %(message)s',
            datefmt='%d-%m %H:%M')
        self.handler.setFormatter(formatter)
        self.logger.addHandler(self.handler)

    # ------------------------------------
    def add(self, message):
        assert type(message) == str
        self.logger.info(message)

    # ------------------------------------
    def close(self):
        self.logger.removeHandler(self.handler)
    # ----------------------------------------
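A minimal usage sketch following the class docstring above. The filename and messages are illustrative; the log directory must already exist, since logging.FileHandler does not create it.

# Hedged example: file and directory names are placeholders.
log = logger('tuning.log', log_dir='../data/log')
log.add('started a new tuning run')
log.add('best score so far: 0.87')
log.close()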
2.640625
3
log/slack_sender.py
SmashKs/BarBarian
0
2888
from slackclient import SlackClient

from external import SLACK_API_KEY


class SlackBot:
    API_CHAT_MSG = 'chat.postMessage'
    BOT_NAME = 'News Bot'
    DEFAULT_CHANNEL = 'news_notification'

    def __new__(cls, *p, **k):
        if '_the_instance' not in cls.__dict__:
            cls._the_instance = object.__new__(cls)
        return cls._the_instance

    def __init__(self):
        self.__slack_client = SlackClient(SLACK_API_KEY)

    def send_msg_to(self, text='', channel=DEFAULT_CHANNEL):
        self.__slack_client.api_call(SlackBot.API_CHAT_MSG,
                                     username=SlackBot.BOT_NAME,
                                     channel=channel,
                                     text=text)

    def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL):
        self.__slack_client.api_call(SlackBot.API_CHAT_MSG,
                                     username=SlackBot.BOT_NAME,
                                     mrkdwn=True,
                                     channel=channel,
                                     text=text)


if __name__ == '__main__':
    SlackBot().send_msg_to('hello world!!')
1.765625
2
net/net.gyp
codenote/chromium-test
0
2904
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'variables': { 'chromium_code': 1, 'linux_link_kerberos%': 0, 'conditions': [ ['chromeos==1 or OS=="android" or OS=="ios"', { # Disable Kerberos on ChromeOS, Android and iOS, at least for now. # It needs configuration (krb5.conf and so on). 'use_kerberos%': 0, }, { # chromeos == 0 'use_kerberos%': 1, }], ['OS=="android" and target_arch != "ia32"', { # The way the cache uses mmap() is inefficient on some Android devices. # If this flag is set, we hackily avoid using mmap() in the disk cache. # We are pretty confident that mmap-ing the index would not hurt any # existing x86 android devices, but we cannot be so sure about the # variety of ARM devices. So enable it for x86 only for now. 'posix_avoid_mmap%': 1, }, { 'posix_avoid_mmap%': 0, }], ['OS=="ios"', { # Websockets and socket stream are not used on iOS. 'enable_websockets%': 0, # iOS does not use V8. 'use_v8_in_net%': 0, 'enable_built_in_dns%': 0, }, { 'enable_websockets%': 1, 'use_v8_in_net%': 1, 'enable_built_in_dns%': 1, }], ], }, 'includes': [ '../build/win_precompile.gypi', ], 'targets': [ { 'target_name': 'net', 'type': '<(component)', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations', '../build/temp_gyp/googleurl.gyp:googleurl', '../crypto/crypto.gyp:crypto', '../sdch/sdch.gyp:sdch', '../third_party/icu/icu.gyp:icui18n', '../third_party/icu/icu.gyp:icuuc', '../third_party/zlib/zlib.gyp:zlib', 'net_resources', ], 'sources': [ 'android/cert_verify_result_android.h', 'android/cert_verify_result_android_list.h', 'android/gurl_utils.cc', 'android/gurl_utils.h', 'android/keystore.cc', 'android/keystore.h', 'android/keystore_openssl.cc', 'android/keystore_openssl.h', 'android/net_jni_registrar.cc', 'android/net_jni_registrar.h', 'android/network_change_notifier_android.cc', 'android/network_change_notifier_android.h', 'android/network_change_notifier_delegate_android.cc', 'android/network_change_notifier_delegate_android.h', 'android/network_change_notifier_factory_android.cc', 'android/network_change_notifier_factory_android.h', 'android/network_library.cc', 'android/network_library.h', 'base/address_family.h', 'base/address_list.cc', 'base/address_list.h', 'base/address_tracker_linux.cc', 'base/address_tracker_linux.h', 'base/auth.cc', 'base/auth.h', 'base/backoff_entry.cc', 'base/backoff_entry.h', 'base/bandwidth_metrics.cc', 'base/bandwidth_metrics.h', 'base/big_endian.cc', 'base/big_endian.h', 'base/cache_type.h', 'base/completion_callback.h', 'base/connection_type_histograms.cc', 'base/connection_type_histograms.h', 'base/crypto_module.h', 'base/crypto_module_nss.cc', 'base/crypto_module_openssl.cc', 'base/data_url.cc', 'base/data_url.h', 'base/directory_lister.cc', 'base/directory_lister.h', 'base/dns_reloader.cc', 'base/dns_reloader.h', 'base/dns_util.cc', 'base/dns_util.h', 'base/escape.cc', 'base/escape.h', 'base/expiring_cache.h', 'base/file_stream.cc', 'base/file_stream.h', 'base/file_stream_context.cc', 'base/file_stream_context.h', 'base/file_stream_context_posix.cc', 'base/file_stream_context_win.cc', 'base/file_stream_metrics.cc', 'base/file_stream_metrics.h', 'base/file_stream_metrics_posix.cc', 'base/file_stream_metrics_win.cc', 'base/file_stream_net_log_parameters.cc', 
'base/file_stream_net_log_parameters.h', 'base/file_stream_whence.h', 'base/filter.cc', 'base/filter.h', 'base/int128.cc', 'base/int128.h', 'base/gzip_filter.cc', 'base/gzip_filter.h', 'base/gzip_header.cc', 'base/gzip_header.h', 'base/hash_value.cc', 'base/hash_value.h', 'base/host_mapping_rules.cc', 'base/host_mapping_rules.h', 'base/host_port_pair.cc', 'base/host_port_pair.h', 'base/io_buffer.cc', 'base/io_buffer.h', 'base/ip_endpoint.cc', 'base/ip_endpoint.h', 'base/keygen_handler.cc', 'base/keygen_handler.h', 'base/keygen_handler_mac.cc', 'base/keygen_handler_nss.cc', 'base/keygen_handler_openssl.cc', 'base/keygen_handler_win.cc', 'base/linked_hash_map.h', 'base/load_flags.h', 'base/load_flags_list.h', 'base/load_states.h', 'base/load_states_list.h', 'base/load_timing_info.cc', 'base/load_timing_info.h', 'base/mime_sniffer.cc', 'base/mime_sniffer.h', 'base/mime_util.cc', 'base/mime_util.h', 'base/net_error_list.h', 'base/net_errors.cc', 'base/net_errors.h', 'base/net_errors_posix.cc', 'base/net_errors_win.cc', 'base/net_export.h', 'base/net_log.cc', 'base/net_log.h', 'base/net_log_event_type_list.h', 'base/net_log_source_type_list.h', 'base/net_module.cc', 'base/net_module.h', 'base/net_util.cc', 'base/net_util.h', 'base/net_util_posix.cc', 'base/net_util_win.cc', 'base/network_change_notifier.cc', 'base/network_change_notifier.h', 'base/network_change_notifier_factory.h', 'base/network_change_notifier_linux.cc', 'base/network_change_notifier_linux.h', 'base/network_change_notifier_mac.cc', 'base/network_change_notifier_mac.h', 'base/network_change_notifier_win.cc', 'base/network_change_notifier_win.h', 'base/network_config_watcher_mac.cc', 'base/network_config_watcher_mac.h', 'base/network_delegate.cc', 'base/network_delegate.h', 'base/nss_memio.c', 'base/nss_memio.h', 'base/openssl_private_key_store.h', 'base/openssl_private_key_store_android.cc', 'base/openssl_private_key_store_memory.cc', 'base/platform_mime_util.h', # TODO(tc): gnome-vfs? xdgmime? /etc/mime.types? 
'base/platform_mime_util_linux.cc', 'base/platform_mime_util_mac.mm', 'base/platform_mime_util_win.cc', 'base/prioritized_dispatcher.cc', 'base/prioritized_dispatcher.h', 'base/priority_queue.h', 'base/rand_callback.h', 'base/registry_controlled_domains/registry_controlled_domain.cc', 'base/registry_controlled_domains/registry_controlled_domain.h', 'base/request_priority.h', 'base/sdch_filter.cc', 'base/sdch_filter.h', 'base/sdch_manager.cc', 'base/sdch_manager.h', 'base/static_cookie_policy.cc', 'base/static_cookie_policy.h', 'base/sys_addrinfo.h', 'base/test_data_stream.cc', 'base/test_data_stream.h', 'base/upload_bytes_element_reader.cc', 'base/upload_bytes_element_reader.h', 'base/upload_data.cc', 'base/upload_data.h', 'base/upload_data_stream.cc', 'base/upload_data_stream.h', 'base/upload_element.cc', 'base/upload_element.h', 'base/upload_element_reader.cc', 'base/upload_element_reader.h', 'base/upload_file_element_reader.cc', 'base/upload_file_element_reader.h', 'base/upload_progress.h', 'base/url_util.cc', 'base/url_util.h', 'base/winsock_init.cc', 'base/winsock_init.h', 'base/winsock_util.cc', 'base/winsock_util.h', 'base/zap.cc', 'base/zap.h', 'cert/asn1_util.cc', 'cert/asn1_util.h', 'cert/cert_database.cc', 'cert/cert_database.h', 'cert/cert_database_android.cc', 'cert/cert_database_ios.cc', 'cert/cert_database_mac.cc', 'cert/cert_database_nss.cc', 'cert/cert_database_openssl.cc', 'cert/cert_database_win.cc', 'cert/cert_status_flags.cc', 'cert/cert_status_flags.h', 'cert/cert_trust_anchor_provider.h', 'cert/cert_verifier.cc', 'cert/cert_verifier.h', 'cert/cert_verify_proc.cc', 'cert/cert_verify_proc.h', 'cert/cert_verify_proc_android.cc', 'cert/cert_verify_proc_android.h', 'cert/cert_verify_proc_mac.cc', 'cert/cert_verify_proc_mac.h', 'cert/cert_verify_proc_nss.cc', 'cert/cert_verify_proc_nss.h', 'cert/cert_verify_proc_openssl.cc', 'cert/cert_verify_proc_openssl.h', 'cert/cert_verify_proc_win.cc', 'cert/cert_verify_proc_win.h', 'cert/cert_verify_result.cc', 'cert/cert_verify_result.h', 'cert/crl_set.cc', 'cert/crl_set.h', 'cert/ev_root_ca_metadata.cc', 'cert/ev_root_ca_metadata.h', 'cert/multi_threaded_cert_verifier.cc', 'cert/multi_threaded_cert_verifier.h', 'cert/nss_cert_database.cc', 'cert/nss_cert_database.h', 'cert/pem_tokenizer.cc', 'cert/pem_tokenizer.h', 'cert/single_request_cert_verifier.cc', 'cert/single_request_cert_verifier.h', 'cert/test_root_certs.cc', 'cert/test_root_certs.h', 'cert/test_root_certs_mac.cc', 'cert/test_root_certs_nss.cc', 'cert/test_root_certs_openssl.cc', 'cert/test_root_certs_android.cc', 'cert/test_root_certs_win.cc', 'cert/x509_cert_types.cc', 'cert/x509_cert_types.h', 'cert/x509_cert_types_mac.cc', 'cert/x509_cert_types_win.cc', 'cert/x509_certificate.cc', 'cert/x509_certificate.h', 'cert/x509_certificate_ios.cc', 'cert/x509_certificate_mac.cc', 'cert/x509_certificate_net_log_param.cc', 'cert/x509_certificate_net_log_param.h', 'cert/x509_certificate_nss.cc', 'cert/x509_certificate_openssl.cc', 'cert/x509_certificate_win.cc', 'cert/x509_util.h', 'cert/x509_util.cc', 'cert/x509_util_ios.cc', 'cert/x509_util_ios.h', 'cert/x509_util_mac.cc', 'cert/x509_util_mac.h', 'cert/x509_util_nss.cc', 'cert/x509_util_nss.h', 'cert/x509_util_openssl.cc', 'cert/x509_util_openssl.h', 'cookies/canonical_cookie.cc', 'cookies/canonical_cookie.h', 'cookies/cookie_monster.cc', 'cookies/cookie_monster.h', 'cookies/cookie_options.h', 'cookies/cookie_store.cc', 'cookies/cookie_store.h', 'cookies/cookie_util.cc', 'cookies/cookie_util.h', 'cookies/parsed_cookie.cc', 
'cookies/parsed_cookie.h', 'disk_cache/addr.cc', 'disk_cache/addr.h', 'disk_cache/backend_impl.cc', 'disk_cache/backend_impl.h', 'disk_cache/bitmap.cc', 'disk_cache/bitmap.h', 'disk_cache/block_files.cc', 'disk_cache/block_files.h', 'disk_cache/cache_creator.cc', 'disk_cache/cache_util.h', 'disk_cache/cache_util.cc', 'disk_cache/cache_util_posix.cc', 'disk_cache/cache_util_win.cc', 'disk_cache/disk_cache.h', 'disk_cache/disk_format.cc', 'disk_cache/disk_format.h', 'disk_cache/entry_impl.cc', 'disk_cache/entry_impl.h', 'disk_cache/errors.h', 'disk_cache/eviction.cc', 'disk_cache/eviction.h', 'disk_cache/experiments.h', 'disk_cache/file.cc', 'disk_cache/file.h', 'disk_cache/file_block.h', 'disk_cache/file_lock.cc', 'disk_cache/file_lock.h', 'disk_cache/file_posix.cc', 'disk_cache/file_win.cc', 'disk_cache/histogram_macros.h', 'disk_cache/in_flight_backend_io.cc', 'disk_cache/in_flight_backend_io.h', 'disk_cache/in_flight_io.cc', 'disk_cache/in_flight_io.h', 'disk_cache/mapped_file.h', 'disk_cache/mapped_file_posix.cc', 'disk_cache/mapped_file_avoid_mmap_posix.cc', 'disk_cache/mapped_file_win.cc', 'disk_cache/mem_backend_impl.cc', 'disk_cache/mem_backend_impl.h', 'disk_cache/mem_entry_impl.cc', 'disk_cache/mem_entry_impl.h', 'disk_cache/mem_rankings.cc', 'disk_cache/mem_rankings.h', 'disk_cache/net_log_parameters.cc', 'disk_cache/net_log_parameters.h', 'disk_cache/rankings.cc', 'disk_cache/rankings.h', 'disk_cache/sparse_control.cc', 'disk_cache/sparse_control.h', 'disk_cache/stats.cc', 'disk_cache/stats.h', 'disk_cache/stats_histogram.cc', 'disk_cache/stats_histogram.h', 'disk_cache/storage_block-inl.h', 'disk_cache/storage_block.h', 'disk_cache/stress_support.h', 'disk_cache/trace.cc', 'disk_cache/trace.h', 'disk_cache/simple/simple_backend_impl.cc', 'disk_cache/simple/simple_backend_impl.h', 'disk_cache/simple/simple_disk_format.cc', 'disk_cache/simple/simple_disk_format.h', 'disk_cache/simple/simple_entry_impl.cc', 'disk_cache/simple/simple_entry_impl.h', 'disk_cache/simple/simple_index.cc', 'disk_cache/simple/simple_index.h', 'disk_cache/simple/simple_synchronous_entry.cc', 'disk_cache/simple/simple_synchronous_entry.h', 'disk_cache/flash/flash_entry_impl.cc', 'disk_cache/flash/flash_entry_impl.h', 'disk_cache/flash/format.h', 'disk_cache/flash/internal_entry.cc', 'disk_cache/flash/internal_entry.h', 'disk_cache/flash/log_store.cc', 'disk_cache/flash/log_store.h', 'disk_cache/flash/log_store_entry.cc', 'disk_cache/flash/log_store_entry.h', 'disk_cache/flash/segment.cc', 'disk_cache/flash/segment.h', 'disk_cache/flash/storage.cc', 'disk_cache/flash/storage.h', 'dns/address_sorter.h', 'dns/address_sorter_posix.cc', 'dns/address_sorter_posix.h', 'dns/address_sorter_win.cc', 'dns/dns_client.cc', 'dns/dns_client.h', 'dns/dns_config_service.cc', 'dns/dns_config_service.h', 'dns/dns_config_service_posix.cc', 'dns/dns_config_service_posix.h', 'dns/dns_config_service_win.cc', 'dns/dns_config_service_win.h', 'dns/dns_hosts.cc', 'dns/dns_hosts.h', 'dns/dns_protocol.h', 'dns/dns_query.cc', 'dns/dns_query.h', 'dns/dns_response.cc', 'dns/dns_response.h', 'dns/dns_session.cc', 'dns/dns_session.h', 'dns/dns_socket_pool.cc', 'dns/dns_socket_pool.h', 'dns/dns_transaction.cc', 'dns/dns_transaction.h', 'dns/host_cache.cc', 'dns/host_cache.h', 'dns/host_resolver.cc', 'dns/host_resolver.h', 'dns/host_resolver_impl.cc', 'dns/host_resolver_impl.h', 'dns/host_resolver_proc.cc', 'dns/host_resolver_proc.h', 'dns/mapped_host_resolver.cc', 'dns/mapped_host_resolver.h', 'dns/notify_watcher_mac.cc', 
'dns/notify_watcher_mac.h', 'dns/serial_worker.cc', 'dns/serial_worker.h', 'dns/single_request_host_resolver.cc', 'dns/single_request_host_resolver.h', 'ftp/ftp_auth_cache.cc', 'ftp/ftp_auth_cache.h', 'ftp/ftp_ctrl_response_buffer.cc', 'ftp/ftp_ctrl_response_buffer.h', 'ftp/ftp_directory_listing_parser.cc', 'ftp/ftp_directory_listing_parser.h', 'ftp/ftp_directory_listing_parser_ls.cc', 'ftp/ftp_directory_listing_parser_ls.h', 'ftp/ftp_directory_listing_parser_netware.cc', 'ftp/ftp_directory_listing_parser_netware.h', 'ftp/ftp_directory_listing_parser_os2.cc', 'ftp/ftp_directory_listing_parser_os2.h', 'ftp/ftp_directory_listing_parser_vms.cc', 'ftp/ftp_directory_listing_parser_vms.h', 'ftp/ftp_directory_listing_parser_windows.cc', 'ftp/ftp_directory_listing_parser_windows.h', 'ftp/ftp_network_layer.cc', 'ftp/ftp_network_layer.h', 'ftp/ftp_network_session.cc', 'ftp/ftp_network_session.h', 'ftp/ftp_network_transaction.cc', 'ftp/ftp_network_transaction.h', 'ftp/ftp_request_info.h', 'ftp/ftp_response_info.cc', 'ftp/ftp_response_info.h', 'ftp/ftp_server_type_histograms.cc', 'ftp/ftp_server_type_histograms.h', 'ftp/ftp_transaction.h', 'ftp/ftp_transaction_factory.h', 'ftp/ftp_util.cc', 'ftp/ftp_util.h', 'http/des.cc', 'http/des.h', 'http/http_atom_list.h', 'http/http_auth.cc', 'http/http_auth.h', 'http/http_auth_cache.cc', 'http/http_auth_cache.h', 'http/http_auth_controller.cc', 'http/http_auth_controller.h', 'http/http_auth_filter.cc', 'http/http_auth_filter.h', 'http/http_auth_filter_win.h', 'http/http_auth_gssapi_posix.cc', 'http/http_auth_gssapi_posix.h', 'http/http_auth_handler.cc', 'http/http_auth_handler.h', 'http/http_auth_handler_basic.cc', 'http/http_auth_handler_basic.h', 'http/http_auth_handler_digest.cc', 'http/http_auth_handler_digest.h', 'http/http_auth_handler_factory.cc', 'http/http_auth_handler_factory.h', 'http/http_auth_handler_negotiate.cc', 'http/http_auth_handler_negotiate.h', 'http/http_auth_handler_ntlm.cc', 'http/http_auth_handler_ntlm.h', 'http/http_auth_handler_ntlm_portable.cc', 'http/http_auth_handler_ntlm_win.cc', 'http/http_auth_sspi_win.cc', 'http/http_auth_sspi_win.h', 'http/http_basic_stream.cc', 'http/http_basic_stream.h', 'http/http_byte_range.cc', 'http/http_byte_range.h', 'http/http_cache.cc', 'http/http_cache.h', 'http/http_cache_transaction.cc', 'http/http_cache_transaction.h', 'http/http_content_disposition.cc', 'http/http_content_disposition.h', 'http/http_chunked_decoder.cc', 'http/http_chunked_decoder.h', 'http/http_network_layer.cc', 'http/http_network_layer.h', 'http/http_network_session.cc', 'http/http_network_session.h', 'http/http_network_session_peer.cc', 'http/http_network_session_peer.h', 'http/http_network_transaction.cc', 'http/http_network_transaction.h', 'http/http_pipelined_connection.h', 'http/http_pipelined_connection_impl.cc', 'http/http_pipelined_connection_impl.h', 'http/http_pipelined_host.cc', 'http/http_pipelined_host.h', 'http/http_pipelined_host_capability.h', 'http/http_pipelined_host_forced.cc', 'http/http_pipelined_host_forced.h', 'http/http_pipelined_host_impl.cc', 'http/http_pipelined_host_impl.h', 'http/http_pipelined_host_pool.cc', 'http/http_pipelined_host_pool.h', 'http/http_pipelined_stream.cc', 'http/http_pipelined_stream.h', 'http/http_proxy_client_socket.cc', 'http/http_proxy_client_socket.h', 'http/http_proxy_client_socket_pool.cc', 'http/http_proxy_client_socket_pool.h', 'http/http_request_headers.cc', 'http/http_request_headers.h', 'http/http_request_info.cc', 'http/http_request_info.h', 
'http/http_response_body_drainer.cc', 'http/http_response_body_drainer.h', 'http/http_response_headers.cc', 'http/http_response_headers.h', 'http/http_response_info.cc', 'http/http_response_info.h', 'http/http_security_headers.cc', 'http/http_security_headers.h', 'http/http_server_properties.cc', 'http/http_server_properties.h', 'http/http_server_properties_impl.cc', 'http/http_server_properties_impl.h', 'http/http_status_code.h', 'http/http_stream.h', 'http/http_stream_base.h', 'http/http_stream_factory.cc', 'http/http_stream_factory.h', 'http/http_stream_factory_impl.cc', 'http/http_stream_factory_impl.h', 'http/http_stream_factory_impl_job.cc', 'http/http_stream_factory_impl_job.h', 'http/http_stream_factory_impl_request.cc', 'http/http_stream_factory_impl_request.h', 'http/http_stream_parser.cc', 'http/http_stream_parser.h', 'http/http_transaction.h', 'http/http_transaction_delegate.h', 'http/http_transaction_factory.h', 'http/http_util.cc', 'http/http_util.h', 'http/http_util_icu.cc', 'http/http_vary_data.cc', 'http/http_vary_data.h', 'http/http_version.h', 'http/md4.cc', 'http/md4.h', 'http/partial_data.cc', 'http/partial_data.h', 'http/proxy_client_socket.h', 'http/proxy_client_socket.cc', 'http/transport_security_state.cc', 'http/transport_security_state.h', 'http/transport_security_state_static.h', 'http/url_security_manager.cc', 'http/url_security_manager.h', 'http/url_security_manager_posix.cc', 'http/url_security_manager_win.cc', 'ocsp/nss_ocsp.cc', 'ocsp/nss_ocsp.h', 'proxy/dhcp_proxy_script_adapter_fetcher_win.cc', 'proxy/dhcp_proxy_script_adapter_fetcher_win.h', 'proxy/dhcp_proxy_script_fetcher.cc', 'proxy/dhcp_proxy_script_fetcher.h', 'proxy/dhcp_proxy_script_fetcher_factory.cc', 'proxy/dhcp_proxy_script_fetcher_factory.h', 'proxy/dhcp_proxy_script_fetcher_win.cc', 'proxy/dhcp_proxy_script_fetcher_win.h', 'proxy/dhcpcsvc_init_win.cc', 'proxy/dhcpcsvc_init_win.h', 'proxy/multi_threaded_proxy_resolver.cc', 'proxy/multi_threaded_proxy_resolver.h', 'proxy/network_delegate_error_observer.cc', 'proxy/network_delegate_error_observer.h', 'proxy/polling_proxy_config_service.cc', 'proxy/polling_proxy_config_service.h', 'proxy/proxy_bypass_rules.cc', 'proxy/proxy_bypass_rules.h', 'proxy/proxy_config.cc', 'proxy/proxy_config.h', 'proxy/proxy_config_service.h', 'proxy/proxy_config_service_android.cc', 'proxy/proxy_config_service_android.h', 'proxy/proxy_config_service_fixed.cc', 'proxy/proxy_config_service_fixed.h', 'proxy/proxy_config_service_ios.cc', 'proxy/proxy_config_service_ios.h', 'proxy/proxy_config_service_linux.cc', 'proxy/proxy_config_service_linux.h', 'proxy/proxy_config_service_mac.cc', 'proxy/proxy_config_service_mac.h', 'proxy/proxy_config_service_win.cc', 'proxy/proxy_config_service_win.h', 'proxy/proxy_config_source.cc', 'proxy/proxy_config_source.h', 'proxy/proxy_info.cc', 'proxy/proxy_info.h', 'proxy/proxy_list.cc', 'proxy/proxy_list.h', 'proxy/proxy_resolver.h', 'proxy/proxy_resolver_error_observer.h', 'proxy/proxy_resolver_mac.cc', 'proxy/proxy_resolver_mac.h', 'proxy/proxy_resolver_script.h', 'proxy/proxy_resolver_script_data.cc', 'proxy/proxy_resolver_script_data.h', 'proxy/proxy_resolver_winhttp.cc', 'proxy/proxy_resolver_winhttp.h', 'proxy/proxy_retry_info.h', 'proxy/proxy_script_decider.cc', 'proxy/proxy_script_decider.h', 'proxy/proxy_script_fetcher.h', 'proxy/proxy_script_fetcher_impl.cc', 'proxy/proxy_script_fetcher_impl.h', 'proxy/proxy_server.cc', 'proxy/proxy_server.h', 'proxy/proxy_server_mac.cc', 'proxy/proxy_service.cc', 'proxy/proxy_service.h', 
'quic/blocked_list.h', 'quic/congestion_control/available_channel_estimator.cc', 'quic/congestion_control/available_channel_estimator.h', 'quic/congestion_control/channel_estimator.cc', 'quic/congestion_control/channel_estimator.h', 'quic/congestion_control/cube_root.cc', 'quic/congestion_control/cube_root.h', 'quic/congestion_control/cubic.cc', 'quic/congestion_control/cubic.h', 'quic/congestion_control/fix_rate_receiver.cc', 'quic/congestion_control/fix_rate_receiver.h', 'quic/congestion_control/fix_rate_sender.cc', 'quic/congestion_control/fix_rate_sender.h', 'quic/congestion_control/hybrid_slow_start.cc', 'quic/congestion_control/hybrid_slow_start.h', 'quic/congestion_control/inter_arrival_bitrate_ramp_up.cc', 'quic/congestion_control/inter_arrival_bitrate_ramp_up.h', 'quic/congestion_control/inter_arrival_overuse_detector.cc', 'quic/congestion_control/inter_arrival_overuse_detector.h', 'quic/congestion_control/inter_arrival_probe.cc', 'quic/congestion_control/inter_arrival_probe.h', 'quic/congestion_control/inter_arrival_receiver.cc', 'quic/congestion_control/inter_arrival_receiver.h', 'quic/congestion_control/inter_arrival_sender.cc', 'quic/congestion_control/inter_arrival_sender.h', 'quic/congestion_control/inter_arrival_state_machine.cc', 'quic/congestion_control/inter_arrival_state_machine.h', 'quic/congestion_control/leaky_bucket.cc', 'quic/congestion_control/leaky_bucket.h', 'quic/congestion_control/paced_sender.cc', 'quic/congestion_control/paced_sender.h', 'quic/congestion_control/quic_congestion_manager.cc', 'quic/congestion_control/quic_congestion_manager.h', 'quic/congestion_control/quic_max_sized_map.h', 'quic/congestion_control/receive_algorithm_interface.cc', 'quic/congestion_control/receive_algorithm_interface.h', 'quic/congestion_control/send_algorithm_interface.cc', 'quic/congestion_control/send_algorithm_interface.h', 'quic/congestion_control/tcp_cubic_sender.cc', 'quic/congestion_control/tcp_cubic_sender.h', 'quic/congestion_control/tcp_receiver.cc', 'quic/congestion_control/tcp_receiver.h', 'quic/crypto/aes_128_gcm_decrypter.h', 'quic/crypto/aes_128_gcm_decrypter_nss.cc', 'quic/crypto/aes_128_gcm_decrypter_openssl.cc', 'quic/crypto/aes_128_gcm_encrypter.h', 'quic/crypto/aes_128_gcm_encrypter_nss.cc', 'quic/crypto/aes_128_gcm_encrypter_openssl.cc', 'quic/crypto/crypto_framer.cc', 'quic/crypto/crypto_framer.h', 'quic/crypto/crypto_handshake.cc', 'quic/crypto/crypto_handshake.h', 'quic/crypto/crypto_protocol.h', 'quic/crypto/crypto_utils.cc', 'quic/crypto/crypto_utils.h', 'quic/crypto/curve25519_key_exchange.cc', 'quic/crypto/curve25519_key_exchange.h', 'quic/crypto/key_exchange.h', 'quic/crypto/null_decrypter.cc', 'quic/crypto/null_decrypter.h', 'quic/crypto/null_encrypter.cc', 'quic/crypto/null_encrypter.h', 'quic/crypto/p256_key_exchange.h', 'quic/crypto/p256_key_exchange_nss.cc', 'quic/crypto/p256_key_exchange_openssl.cc', 'quic/crypto/quic_decrypter.cc', 'quic/crypto/quic_decrypter.h', 'quic/crypto/quic_encrypter.cc', 'quic/crypto/quic_encrypter.h', 'quic/crypto/quic_random.cc', 'quic/crypto/quic_random.h', 'quic/crypto/scoped_evp_cipher_ctx.h', 'quic/crypto/strike_register.cc', 'quic/crypto/strike_register.h', 'quic/quic_bandwidth.cc', 'quic/quic_bandwidth.h', 'quic/quic_blocked_writer_interface.h', 'quic/quic_client_session.cc', 'quic/quic_client_session.h', 'quic/quic_crypto_client_stream.cc', 'quic/quic_crypto_client_stream.h', 'quic/quic_crypto_client_stream_factory.h', 'quic/quic_crypto_server_stream.cc', 'quic/quic_crypto_server_stream.h', 
'quic/quic_crypto_stream.cc', 'quic/quic_crypto_stream.h', 'quic/quic_clock.cc', 'quic/quic_clock.h', 'quic/quic_connection.cc', 'quic/quic_connection.h', 'quic/quic_connection_helper.cc', 'quic/quic_connection_helper.h', 'quic/quic_connection_logger.cc', 'quic/quic_connection_logger.h', 'quic/quic_data_reader.cc', 'quic/quic_data_reader.h', 'quic/quic_data_writer.cc', 'quic/quic_data_writer.h', 'quic/quic_fec_group.cc', 'quic/quic_fec_group.h', 'quic/quic_framer.cc', 'quic/quic_framer.h', 'quic/quic_http_stream.cc', 'quic/quic_http_stream.h', 'quic/quic_packet_creator.cc', 'quic/quic_packet_creator.h', 'quic/quic_packet_entropy_manager.cc', 'quic/quic_packet_entropy_manager.h', 'quic/quic_packet_generator.cc', 'quic/quic_packet_generator.h', 'quic/quic_protocol.cc', 'quic/quic_protocol.h', 'quic/quic_reliable_client_stream.cc', 'quic/quic_reliable_client_stream.h', 'quic/quic_session.cc', 'quic/quic_session.h', 'quic/quic_stats.cc', 'quic/quic_stats.h', 'quic/quic_stream_factory.cc', 'quic/quic_stream_factory.h', 'quic/quic_stream_sequencer.cc', 'quic/quic_stream_sequencer.h', 'quic/quic_time.cc', 'quic/quic_time.h', 'quic/quic_utils.cc', 'quic/quic_utils.h', 'quic/reliable_quic_stream.cc', 'quic/reliable_quic_stream.h', 'socket/buffered_write_stream_socket.cc', 'socket/buffered_write_stream_socket.h', 'socket/client_socket_factory.cc', 'socket/client_socket_factory.h', 'socket/client_socket_handle.cc', 'socket/client_socket_handle.h', 'socket/client_socket_pool.cc', 'socket/client_socket_pool.h', 'socket/client_socket_pool_base.cc', 'socket/client_socket_pool_base.h', 'socket/client_socket_pool_histograms.cc', 'socket/client_socket_pool_histograms.h', 'socket/client_socket_pool_manager.cc', 'socket/client_socket_pool_manager.h', 'socket/client_socket_pool_manager_impl.cc', 'socket/client_socket_pool_manager_impl.h', 'socket/next_proto.h', 'socket/nss_ssl_util.cc', 'socket/nss_ssl_util.h', 'socket/server_socket.h', 'socket/socket_net_log_params.cc', 'socket/socket_net_log_params.h', 'socket/socket.h', 'socket/socks5_client_socket.cc', 'socket/socks5_client_socket.h', 'socket/socks_client_socket.cc', 'socket/socks_client_socket.h', 'socket/socks_client_socket_pool.cc', 'socket/socks_client_socket_pool.h', 'socket/ssl_client_socket.cc', 'socket/ssl_client_socket.h', 'socket/ssl_client_socket_nss.cc', 'socket/ssl_client_socket_nss.h', 'socket/ssl_client_socket_openssl.cc', 'socket/ssl_client_socket_openssl.h', 'socket/ssl_client_socket_pool.cc', 'socket/ssl_client_socket_pool.h', 'socket/ssl_error_params.cc', 'socket/ssl_error_params.h', 'socket/ssl_server_socket.h', 'socket/ssl_server_socket_nss.cc', 'socket/ssl_server_socket_nss.h', 'socket/ssl_server_socket_openssl.cc', 'socket/ssl_socket.h', 'socket/stream_listen_socket.cc', 'socket/stream_listen_socket.h', 'socket/stream_socket.cc', 'socket/stream_socket.h', 'socket/tcp_client_socket.cc', 'socket/tcp_client_socket.h', 'socket/tcp_client_socket_libevent.cc', 'socket/tcp_client_socket_libevent.h', 'socket/tcp_client_socket_win.cc', 'socket/tcp_client_socket_win.h', 'socket/tcp_listen_socket.cc', 'socket/tcp_listen_socket.h', 'socket/tcp_server_socket.h', 'socket/tcp_server_socket_libevent.cc', 'socket/tcp_server_socket_libevent.h', 'socket/tcp_server_socket_win.cc', 'socket/tcp_server_socket_win.h', 'socket/transport_client_socket_pool.cc', 'socket/transport_client_socket_pool.h', 'socket/unix_domain_socket_posix.cc', 'socket/unix_domain_socket_posix.h', 'socket_stream/socket_stream.cc', 'socket_stream/socket_stream.h', 
'socket_stream/socket_stream_job.cc', 'socket_stream/socket_stream_job.h', 'socket_stream/socket_stream_job_manager.cc', 'socket_stream/socket_stream_job_manager.h', 'socket_stream/socket_stream_metrics.cc', 'socket_stream/socket_stream_metrics.h', 'spdy/buffered_spdy_framer.cc', 'spdy/buffered_spdy_framer.h', 'spdy/spdy_bitmasks.h', 'spdy/spdy_credential_builder.cc', 'spdy/spdy_credential_builder.h', 'spdy/spdy_credential_state.cc', 'spdy/spdy_credential_state.h', 'spdy/spdy_frame_builder.cc', 'spdy/spdy_frame_builder.h', 'spdy/spdy_frame_reader.cc', 'spdy/spdy_frame_reader.h', 'spdy/spdy_framer.cc', 'spdy/spdy_framer.h', 'spdy/spdy_header_block.cc', 'spdy/spdy_header_block.h', 'spdy/spdy_http_stream.cc', 'spdy/spdy_http_stream.h', 'spdy/spdy_http_utils.cc', 'spdy/spdy_http_utils.h', 'spdy/spdy_io_buffer.cc', 'spdy/spdy_io_buffer.h', 'spdy/spdy_priority_forest.h', 'spdy/spdy_protocol.cc', 'spdy/spdy_protocol.h', 'spdy/spdy_proxy_client_socket.cc', 'spdy/spdy_proxy_client_socket.h', 'spdy/spdy_session.cc', 'spdy/spdy_session.h', 'spdy/spdy_session_pool.cc', 'spdy/spdy_session_pool.h', 'spdy/spdy_stream.cc', 'spdy/spdy_stream.h', 'spdy/spdy_websocket_stream.cc', 'spdy/spdy_websocket_stream.h', 'ssl/client_cert_store.h', 'ssl/client_cert_store_impl.h', 'ssl/client_cert_store_impl_mac.cc', 'ssl/client_cert_store_impl_nss.cc', 'ssl/client_cert_store_impl_win.cc', 'ssl/default_server_bound_cert_store.cc', 'ssl/default_server_bound_cert_store.h', 'ssl/openssl_client_key_store.cc', 'ssl/openssl_client_key_store.h', 'ssl/server_bound_cert_service.cc', 'ssl/server_bound_cert_service.h', 'ssl/server_bound_cert_store.cc', 'ssl/server_bound_cert_store.h', 'ssl/ssl_cert_request_info.cc', 'ssl/ssl_cert_request_info.h', 'ssl/ssl_cipher_suite_names.cc', 'ssl/ssl_cipher_suite_names.h', 'ssl/ssl_client_auth_cache.cc', 'ssl/ssl_client_auth_cache.h', 'ssl/ssl_client_cert_type.h', 'ssl/ssl_config_service.cc', 'ssl/ssl_config_service.h', 'ssl/ssl_config_service_defaults.cc', 'ssl/ssl_config_service_defaults.h', 'ssl/ssl_info.cc', 'ssl/ssl_info.h', 'third_party/mozilla_security_manager/nsKeygenHandler.cpp', 'third_party/mozilla_security_manager/nsKeygenHandler.h', 'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp', 'third_party/mozilla_security_manager/nsNSSCertificateDB.h', 'third_party/mozilla_security_manager/nsPKCS12Blob.cpp', 'third_party/mozilla_security_manager/nsPKCS12Blob.h', 'udp/datagram_client_socket.h', 'udp/datagram_server_socket.h', 'udp/datagram_socket.h', 'udp/udp_client_socket.cc', 'udp/udp_client_socket.h', 'udp/udp_net_log_parameters.cc', 'udp/udp_net_log_parameters.h', 'udp/udp_server_socket.cc', 'udp/udp_server_socket.h', 'udp/udp_socket.h', 'udp/udp_socket_libevent.cc', 'udp/udp_socket_libevent.h', 'udp/udp_socket_win.cc', 'udp/udp_socket_win.h', 'url_request/data_protocol_handler.cc', 'url_request/data_protocol_handler.h', 'url_request/file_protocol_handler.cc', 'url_request/file_protocol_handler.h', 'url_request/fraudulent_certificate_reporter.h', 'url_request/ftp_protocol_handler.cc', 'url_request/ftp_protocol_handler.h', 'url_request/http_user_agent_settings.h', 'url_request/protocol_intercept_job_factory.cc', 'url_request/protocol_intercept_job_factory.h', 'url_request/static_http_user_agent_settings.cc', 'url_request/static_http_user_agent_settings.h', 'url_request/url_fetcher.cc', 'url_request/url_fetcher.h', 'url_request/url_fetcher_core.cc', 'url_request/url_fetcher_core.h', 'url_request/url_fetcher_delegate.cc', 'url_request/url_fetcher_delegate.h', 
'url_request/url_fetcher_factory.h', 'url_request/url_fetcher_impl.cc', 'url_request/url_fetcher_impl.h', 'url_request/url_fetcher_response_writer.cc', 'url_request/url_fetcher_response_writer.h', 'url_request/url_request.cc', 'url_request/url_request.h', 'url_request/url_request_about_job.cc', 'url_request/url_request_about_job.h', 'url_request/url_request_context.cc', 'url_request/url_request_context.h', 'url_request/url_request_context_builder.cc', 'url_request/url_request_context_builder.h', 'url_request/url_request_context_getter.cc', 'url_request/url_request_context_getter.h', 'url_request/url_request_context_storage.cc', 'url_request/url_request_context_storage.h', 'url_request/url_request_data_job.cc', 'url_request/url_request_data_job.h', 'url_request/url_request_error_job.cc', 'url_request/url_request_error_job.h', 'url_request/url_request_file_dir_job.cc', 'url_request/url_request_file_dir_job.h', 'url_request/url_request_file_job.cc', 'url_request/url_request_file_job.h', 'url_request/url_request_filter.cc', 'url_request/url_request_filter.h', 'url_request/url_request_ftp_job.cc', 'url_request/url_request_ftp_job.h', 'url_request/url_request_http_job.cc', 'url_request/url_request_http_job.h', 'url_request/url_request_job.cc', 'url_request/url_request_job.h', 'url_request/url_request_job_factory.cc', 'url_request/url_request_job_factory.h', 'url_request/url_request_job_factory_impl.cc', 'url_request/url_request_job_factory_impl.h', 'url_request/url_request_job_manager.cc', 'url_request/url_request_job_manager.h', 'url_request/url_request_netlog_params.cc', 'url_request/url_request_netlog_params.h', 'url_request/url_request_redirect_job.cc', 'url_request/url_request_redirect_job.h', 'url_request/url_request_simple_job.cc', 'url_request/url_request_simple_job.h', 'url_request/url_request_status.h', 'url_request/url_request_test_job.cc', 'url_request/url_request_test_job.h', 'url_request/url_request_throttler_entry.cc', 'url_request/url_request_throttler_entry.h', 'url_request/url_request_throttler_entry_interface.h', 'url_request/url_request_throttler_header_adapter.cc', 'url_request/url_request_throttler_header_adapter.h', 'url_request/url_request_throttler_header_interface.h', 'url_request/url_request_throttler_manager.cc', 'url_request/url_request_throttler_manager.h', 'url_request/view_cache_helper.cc', 'url_request/view_cache_helper.h', 'websockets/websocket_errors.cc', 'websockets/websocket_errors.h', 'websockets/websocket_frame.cc', 'websockets/websocket_frame.h', 'websockets/websocket_frame_parser.cc', 'websockets/websocket_frame_parser.h', 'websockets/websocket_handshake_handler.cc', 'websockets/websocket_handshake_handler.h', 'websockets/websocket_job.cc', 'websockets/websocket_job.h', 'websockets/websocket_net_log_params.cc', 'websockets/websocket_net_log_params.h', 'websockets/websocket_stream.h', 'websockets/websocket_throttle.cc', 'websockets/websocket_throttle.h', ], 'defines': [ 'NET_IMPLEMENTATION', ], 'export_dependent_settings': [ '../base/base.gyp:base', ], 'conditions': [ ['chromeos==1', { 'sources!': [ 'base/network_change_notifier_linux.cc', 'base/network_change_notifier_linux.h', 'base/network_change_notifier_netlink_linux.cc', 'base/network_change_notifier_netlink_linux.h', 'proxy/proxy_config_service_linux.cc', 'proxy/proxy_config_service_linux.h', ], }], ['use_kerberos==1', { 'defines': [ 'USE_KERBEROS', ], 'conditions': [ ['OS=="openbsd"', { 'include_dirs': [ '/usr/include/kerberosV' ], }], ['linux_link_kerberos==1', { 'link_settings': { 'ldflags': [ 
'<!@(krb5-config --libs gssapi)', ], }, }, { # linux_link_kerberos==0 'defines': [ 'DLOPEN_KERBEROS', ], }], ], }, { # use_kerberos == 0 'sources!': [ 'http/http_auth_gssapi_posix.cc', 'http/http_auth_gssapi_posix.h', 'http/http_auth_handler_negotiate.h', 'http/http_auth_handler_negotiate.cc', ], }], ['posix_avoid_mmap==1', { 'defines': [ 'POSIX_AVOID_MMAP', ], 'direct_dependent_settings': { 'defines': [ 'POSIX_AVOID_MMAP', ], }, 'sources!': [ 'disk_cache/mapped_file_posix.cc', ], }, { # else 'sources!': [ 'disk_cache/mapped_file_avoid_mmap_posix.cc', ], }], ['disable_ftp_support==1', { 'sources/': [ ['exclude', '^ftp/'], ], 'sources!': [ 'url_request/ftp_protocol_handler.cc', 'url_request/ftp_protocol_handler.h', 'url_request/url_request_ftp_job.cc', 'url_request/url_request_ftp_job.h', ], }], ['enable_built_in_dns==1', { 'defines': [ 'ENABLE_BUILT_IN_DNS', ] }, { # else 'sources!': [ 'dns/address_sorter_posix.cc', 'dns/address_sorter_posix.h', 'dns/dns_client.cc', ], }], ['use_openssl==1', { 'sources!': [ 'base/crypto_module_nss.cc', 'base/keygen_handler_nss.cc', 'base/nss_memio.c', 'base/nss_memio.h', 'cert/cert_database_nss.cc', 'cert/cert_verify_proc_nss.cc', 'cert/cert_verify_proc_nss.h', 'cert/nss_cert_database.cc', 'cert/nss_cert_database.h', 'cert/test_root_certs_nss.cc', 'cert/x509_certificate_nss.cc', 'cert/x509_util_nss.cc', 'cert/x509_util_nss.h', 'ocsp/nss_ocsp.cc', 'ocsp/nss_ocsp.h', 'quic/crypto/aes_128_gcm_decrypter_nss.cc', 'quic/crypto/aes_128_gcm_encrypter_nss.cc', 'quic/crypto/p256_key_exchange_nss.cc', 'socket/nss_ssl_util.cc', 'socket/nss_ssl_util.h', 'socket/ssl_client_socket_nss.cc', 'socket/ssl_client_socket_nss.h', 'socket/ssl_server_socket_nss.cc', 'socket/ssl_server_socket_nss.h', 'ssl/client_cert_store_impl_nss.cc', 'third_party/mozilla_security_manager/nsKeygenHandler.cpp', 'third_party/mozilla_security_manager/nsKeygenHandler.h', 'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp', 'third_party/mozilla_security_manager/nsNSSCertificateDB.h', 'third_party/mozilla_security_manager/nsPKCS12Blob.cpp', 'third_party/mozilla_security_manager/nsPKCS12Blob.h', ], }, { # else !use_openssl: remove the unneeded files 'sources!': [ 'base/crypto_module_openssl.cc', 'base/keygen_handler_openssl.cc', 'base/openssl_private_key_store.h', 'base/openssl_private_key_store_android.cc', 'base/openssl_private_key_store_memory.cc', 'cert/cert_database_openssl.cc', 'cert/cert_verify_proc_openssl.cc', 'cert/cert_verify_proc_openssl.h', 'cert/test_root_certs_openssl.cc', 'cert/x509_certificate_openssl.cc', 'cert/x509_util_openssl.cc', 'cert/x509_util_openssl.h', 'quic/crypto/aes_128_gcm_decrypter_openssl.cc', 'quic/crypto/aes_128_gcm_encrypter_openssl.cc', 'quic/crypto/p256_key_exchange_openssl.cc', 'quic/crypto/scoped_evp_cipher_ctx.h', 'socket/ssl_client_socket_openssl.cc', 'socket/ssl_client_socket_openssl.h', 'socket/ssl_server_socket_openssl.cc', 'ssl/openssl_client_key_store.cc', 'ssl/openssl_client_key_store.h', ], }, ], [ 'use_glib == 1', { 'dependencies': [ '../build/linux/system.gyp:gconf', '../build/linux/system.gyp:gio', ], 'conditions': [ ['use_openssl==1', { 'dependencies': [ '../third_party/openssl/openssl.gyp:openssl', ], }, { # else use_openssl==0, use NSS 'dependencies': [ '../build/linux/system.gyp:ssl', ], }], ['os_bsd==1', { 'sources!': [ 'base/network_change_notifier_linux.cc', 'base/network_change_notifier_netlink_linux.cc', 'proxy/proxy_config_service_linux.cc', ], },{ 'dependencies': [ '../build/linux/system.gyp:libresolv', ], }], ['OS=="solaris"', { 
'link_settings': { 'ldflags': [ '-R/usr/lib/mps', ], }, }], ], }, { # else: OS is not in the above list 'sources!': [ 'base/crypto_module_nss.cc', 'base/keygen_handler_nss.cc', 'cert/cert_database_nss.cc', 'cert/nss_cert_database.cc', 'cert/nss_cert_database.h', 'cert/test_root_certs_nss.cc', 'cert/x509_certificate_nss.cc', 'ocsp/nss_ocsp.cc', 'ocsp/nss_ocsp.h', 'third_party/mozilla_security_manager/nsKeygenHandler.cpp', 'third_party/mozilla_security_manager/nsKeygenHandler.h', 'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp', 'third_party/mozilla_security_manager/nsNSSCertificateDB.h', 'third_party/mozilla_security_manager/nsPKCS12Blob.cpp', 'third_party/mozilla_security_manager/nsPKCS12Blob.h', ], }, ], [ 'toolkit_uses_gtk == 1', { 'dependencies': [ '../build/linux/system.gyp:gdk', ], }], [ 'use_nss != 1', { 'sources!': [ 'cert/cert_verify_proc_nss.cc', 'cert/cert_verify_proc_nss.h', 'ssl/client_cert_store_impl_nss.cc', ], }], [ 'enable_websockets != 1', { 'sources/': [ ['exclude', '^socket_stream/'], ['exclude', '^websockets/'], ], 'sources!': [ 'spdy/spdy_websocket_stream.cc', 'spdy/spdy_websocket_stream.h', ], }], [ 'OS == "win"', { 'sources!': [ 'http/http_auth_handler_ntlm_portable.cc', 'socket/tcp_client_socket_libevent.cc', 'socket/tcp_client_socket_libevent.h', 'socket/tcp_server_socket_libevent.cc', 'socket/tcp_server_socket_libevent.h', 'ssl/client_cert_store_impl_nss.cc', 'udp/udp_socket_libevent.cc', 'udp/udp_socket_libevent.h', ], 'dependencies': [ '../third_party/nss/nss.gyp:nspr', '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', 'tld_cleanup', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { # else: OS != "win" 'sources!': [ 'base/winsock_init.cc', 'base/winsock_init.h', 'base/winsock_util.cc', 'base/winsock_util.h', 'proxy/proxy_resolver_winhttp.cc', 'proxy/proxy_resolver_winhttp.h', ], }, ], [ 'OS == "mac"', { 'sources!': [ 'ssl/client_cert_store_impl_nss.cc', ], 'dependencies': [ '../third_party/nss/nss.gyp:nspr', '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', ], 'link_settings': { 'libraries': [ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework', '$(SDKROOT)/System/Library/Frameworks/Security.framework', '$(SDKROOT)/System/Library/Frameworks/SystemConfiguration.framework', '$(SDKROOT)/usr/lib/libresolv.dylib', ] }, }, ], [ 'OS == "ios"', { 'dependencies': [ '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', ], 'link_settings': { 'libraries': [ '$(SDKROOT)/System/Library/Frameworks/CFNetwork.framework', '$(SDKROOT)/System/Library/Frameworks/MobileCoreServices.framework', '$(SDKROOT)/System/Library/Frameworks/Security.framework', '$(SDKROOT)/System/Library/Frameworks/SystemConfiguration.framework', '$(SDKROOT)/usr/lib/libresolv.dylib', ], }, }, ], ['OS=="android" and _toolset=="target" and android_webview_build == 0', { 'dependencies': [ 'net_java', ], }], [ 'OS == "android"', { 'dependencies': [ '../third_party/openssl/openssl.gyp:openssl', 'net_jni_headers', ], 'sources!': [ 'base/openssl_private_key_store_memory.cc', 'cert/cert_database_openssl.cc', 'cert/cert_verify_proc_openssl.cc', 'cert/test_root_certs_openssl.cc', ], # The net/android/keystore_openssl.cc source file needs to # access an OpenSSL-internal header. 'include_dirs': [ '../third_party/openssl', ], }, { # else OS != "android" 'defines': [ # These are the features Android doesn't support. 
'ENABLE_MEDIA_CODEC_THEORA', ], }, ], [ 'OS == "linux"', { 'dependencies': [ '../build/linux/system.gyp:dbus', '../dbus/dbus.gyp:dbus', ], }, ], ], 'target_conditions': [ # These source files are excluded by default platform rules, but they # are needed in specific cases on other platforms. Re-including them can # only be done in target_conditions as it is evaluated after the # platform rules. ['OS == "android"', { 'sources/': [ ['include', '^base/platform_mime_util_linux\\.cc$'], ], }], ['OS == "ios"', { 'sources/': [ ['include', '^base/network_change_notifier_mac\\.cc$'], ['include', '^base/network_config_watcher_mac\\.cc$'], ['include', '^base/platform_mime_util_mac\\.mm$'], # The iOS implementation only partially uses NSS and thus does not # defines |use_nss|. In particular the |USE_NSS| preprocessor # definition is not used. The following files are needed though: ['include', '^cert/cert_verify_proc_nss\\.cc$'], ['include', '^cert/cert_verify_proc_nss\\.h$'], ['include', '^cert/test_root_certs_nss\\.cc$'], ['include', '^cert/x509_util_nss\\.cc$'], ['include', '^cert/x509_util_nss\\.h$'], ['include', '^dns/notify_watcher_mac\\.cc$'], ['include', '^proxy/proxy_resolver_mac\\.cc$'], ['include', '^proxy/proxy_server_mac\\.cc$'], ['include', '^ocsp/nss_ocsp\\.cc$'], ['include', '^ocsp/nss_ocsp\\.h$'], ], }], ], }, { 'target_name': 'net_unittests', 'type': '<(gtest_target_type)', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations', '../build/temp_gyp/googleurl.gyp:googleurl', '../crypto/crypto.gyp:crypto', '../testing/gmock.gyp:gmock', '../testing/gtest.gyp:gtest', '../third_party/zlib/zlib.gyp:zlib', 'net', 'net_test_support', ], 'sources': [ 'android/keystore_unittest.cc', 'android/network_change_notifier_android_unittest.cc', 'base/address_list_unittest.cc', 'base/address_tracker_linux_unittest.cc', 'base/backoff_entry_unittest.cc', 'base/big_endian_unittest.cc', 'base/data_url_unittest.cc', 'base/directory_lister_unittest.cc', 'base/dns_util_unittest.cc', 'base/escape_unittest.cc', 'base/expiring_cache_unittest.cc', 'base/file_stream_unittest.cc', 'base/filter_unittest.cc', 'base/int128_unittest.cc', 'base/gzip_filter_unittest.cc', 'base/host_mapping_rules_unittest.cc', 'base/host_port_pair_unittest.cc', 'base/ip_endpoint_unittest.cc', 'base/keygen_handler_unittest.cc', 'base/mime_sniffer_unittest.cc', 'base/mime_util_unittest.cc', 'base/mock_filter_context.cc', 'base/mock_filter_context.h', 'base/net_log_unittest.cc', 'base/net_log_unittest.h', 'base/net_util_unittest.cc', 'base/network_change_notifier_win_unittest.cc', 'base/prioritized_dispatcher_unittest.cc', 'base/priority_queue_unittest.cc', 'base/registry_controlled_domains/registry_controlled_domain_unittest.cc', 'base/sdch_filter_unittest.cc', 'base/static_cookie_policy_unittest.cc', 'base/test_completion_callback_unittest.cc', 'base/upload_bytes_element_reader_unittest.cc', 'base/upload_data_stream_unittest.cc', 'base/upload_file_element_reader_unittest.cc', 'base/url_util_unittest.cc', 'cert/cert_verify_proc_unittest.cc', 'cert/crl_set_unittest.cc', 'cert/ev_root_ca_metadata_unittest.cc', 'cert/multi_threaded_cert_verifier_unittest.cc', 'cert/nss_cert_database_unittest.cc', 'cert/pem_tokenizer_unittest.cc', 'cert/x509_certificate_unittest.cc', 'cert/x509_cert_types_unittest.cc', 'cert/x509_util_unittest.cc', 'cert/x509_util_nss_unittest.cc', 'cert/x509_util_openssl_unittest.cc', 'cookies/canonical_cookie_unittest.cc', 
'cookies/cookie_monster_unittest.cc', 'cookies/cookie_store_unittest.h', 'cookies/cookie_util_unittest.cc', 'cookies/parsed_cookie_unittest.cc', 'disk_cache/addr_unittest.cc', 'disk_cache/backend_unittest.cc', 'disk_cache/bitmap_unittest.cc', 'disk_cache/block_files_unittest.cc', 'disk_cache/cache_util_unittest.cc', 'disk_cache/entry_unittest.cc', 'disk_cache/mapped_file_unittest.cc', 'disk_cache/storage_block_unittest.cc', 'disk_cache/flash/flash_entry_unittest.cc', 'disk_cache/flash/log_store_entry_unittest.cc', 'disk_cache/flash/log_store_unittest.cc', 'disk_cache/flash/segment_unittest.cc', 'disk_cache/flash/storage_unittest.cc', 'dns/address_sorter_posix_unittest.cc', 'dns/address_sorter_unittest.cc', 'dns/dns_config_service_posix_unittest.cc', 'dns/dns_config_service_unittest.cc', 'dns/dns_config_service_win_unittest.cc', 'dns/dns_hosts_unittest.cc', 'dns/dns_query_unittest.cc', 'dns/dns_response_unittest.cc', 'dns/dns_session_unittest.cc', 'dns/dns_transaction_unittest.cc', 'dns/host_cache_unittest.cc', 'dns/host_resolver_impl_unittest.cc', 'dns/mapped_host_resolver_unittest.cc', 'dns/serial_worker_unittest.cc', 'dns/single_request_host_resolver_unittest.cc', 'ftp/ftp_auth_cache_unittest.cc', 'ftp/ftp_ctrl_response_buffer_unittest.cc', 'ftp/ftp_directory_listing_parser_ls_unittest.cc', 'ftp/ftp_directory_listing_parser_netware_unittest.cc', 'ftp/ftp_directory_listing_parser_os2_unittest.cc', 'ftp/ftp_directory_listing_parser_unittest.cc', 'ftp/ftp_directory_listing_parser_unittest.h', 'ftp/ftp_directory_listing_parser_vms_unittest.cc', 'ftp/ftp_directory_listing_parser_windows_unittest.cc', 'ftp/ftp_network_transaction_unittest.cc', 'ftp/ftp_util_unittest.cc', 'http/des_unittest.cc', 'http/http_auth_cache_unittest.cc', 'http/http_auth_controller_unittest.cc', 'http/http_auth_filter_unittest.cc', 'http/http_auth_gssapi_posix_unittest.cc', 'http/http_auth_handler_basic_unittest.cc', 'http/http_auth_handler_digest_unittest.cc', 'http/http_auth_handler_factory_unittest.cc', 'http/http_auth_handler_mock.cc', 'http/http_auth_handler_mock.h', 'http/http_auth_handler_negotiate_unittest.cc', 'http/http_auth_handler_unittest.cc', 'http/http_auth_sspi_win_unittest.cc', 'http/http_auth_unittest.cc', 'http/http_byte_range_unittest.cc', 'http/http_cache_unittest.cc', 'http/http_chunked_decoder_unittest.cc', 'http/http_content_disposition_unittest.cc', 'http/http_network_layer_unittest.cc', 'http/http_network_transaction_spdy3_unittest.cc', 'http/http_network_transaction_spdy2_unittest.cc', 'http/http_pipelined_connection_impl_unittest.cc', 'http/http_pipelined_host_forced_unittest.cc', 'http/http_pipelined_host_impl_unittest.cc', 'http/http_pipelined_host_pool_unittest.cc', 'http/http_pipelined_host_test_util.cc', 'http/http_pipelined_host_test_util.h', 'http/http_pipelined_network_transaction_unittest.cc', 'http/http_proxy_client_socket_pool_spdy2_unittest.cc', 'http/http_proxy_client_socket_pool_spdy3_unittest.cc', 'http/http_request_headers_unittest.cc', 'http/http_response_body_drainer_unittest.cc', 'http/http_response_headers_unittest.cc', 'http/http_security_headers_unittest.cc', 'http/http_server_properties_impl_unittest.cc', 'http/http_stream_factory_impl_unittest.cc', 'http/http_stream_parser_unittest.cc', 'http/http_transaction_unittest.cc', 'http/http_transaction_unittest.h', 'http/http_util_unittest.cc', 'http/http_vary_data_unittest.cc', 'http/mock_allow_url_security_manager.cc', 'http/mock_allow_url_security_manager.h', 'http/mock_gssapi_library_posix.cc', 
'http/mock_gssapi_library_posix.h', 'http/mock_http_cache.cc', 'http/mock_http_cache.h', 'http/mock_sspi_library_win.cc', 'http/mock_sspi_library_win.h', 'http/transport_security_state_unittest.cc', 'http/url_security_manager_unittest.cc', 'proxy/dhcp_proxy_script_adapter_fetcher_win_unittest.cc', 'proxy/dhcp_proxy_script_fetcher_factory_unittest.cc', 'proxy/dhcp_proxy_script_fetcher_win_unittest.cc', 'proxy/multi_threaded_proxy_resolver_unittest.cc', 'proxy/network_delegate_error_observer_unittest.cc', 'proxy/proxy_bypass_rules_unittest.cc', 'proxy/proxy_config_service_android_unittest.cc', 'proxy/proxy_config_service_linux_unittest.cc', 'proxy/proxy_config_service_win_unittest.cc', 'proxy/proxy_config_unittest.cc', 'proxy/proxy_info_unittest.cc', 'proxy/proxy_list_unittest.cc', 'proxy/proxy_resolver_v8_tracing_unittest.cc', 'proxy/proxy_resolver_v8_unittest.cc', 'proxy/proxy_script_decider_unittest.cc', 'proxy/proxy_script_fetcher_impl_unittest.cc', 'proxy/proxy_server_unittest.cc', 'proxy/proxy_service_unittest.cc', 'quic/blocked_list_test.cc', 'quic/congestion_control/available_channel_estimator_test.cc', 'quic/congestion_control/channel_estimator_test.cc', 'quic/congestion_control/cube_root_test.cc', 'quic/congestion_control/cubic_test.cc', 'quic/congestion_control/fix_rate_test.cc', 'quic/congestion_control/hybrid_slow_start_test.cc', 'quic/congestion_control/inter_arrival_bitrate_ramp_up_test.cc', 'quic/congestion_control/inter_arrival_overuse_detector_test.cc', 'quic/congestion_control/inter_arrival_probe_test.cc', 'quic/congestion_control/inter_arrival_receiver_test.cc', 'quic/congestion_control/inter_arrival_state_machine_test.cc', 'quic/congestion_control/inter_arrival_sender_test.cc', 'quic/congestion_control/leaky_bucket_test.cc', 'quic/congestion_control/paced_sender_test.cc', 'quic/congestion_control/quic_congestion_control_test.cc', 'quic/congestion_control/quic_congestion_manager_test.cc', 'quic/congestion_control/quic_max_sized_map_test.cc', 'quic/congestion_control/tcp_cubic_sender_test.cc', 'quic/congestion_control/tcp_receiver_test.cc', 'quic/crypto/aes_128_gcm_decrypter_test.cc', 'quic/crypto/aes_128_gcm_encrypter_test.cc', 'quic/crypto/crypto_framer_test.cc', 'quic/crypto/crypto_handshake_test.cc', 'quic/crypto/curve25519_key_exchange_test.cc', 'quic/crypto/null_decrypter_test.cc', 'quic/crypto/null_encrypter_test.cc', 'quic/crypto/p256_key_exchange_test.cc', 'quic/crypto/quic_random_test.cc', 'quic/crypto/strike_register_test.cc', 'quic/test_tools/crypto_test_utils.cc', 'quic/test_tools/crypto_test_utils.h', 'quic/test_tools/mock_clock.cc', 'quic/test_tools/mock_clock.h', 'quic/test_tools/mock_crypto_client_stream.cc', 'quic/test_tools/mock_crypto_client_stream.h', 'quic/test_tools/mock_crypto_client_stream_factory.cc', 'quic/test_tools/mock_crypto_client_stream_factory.h', 'quic/test_tools/mock_random.cc', 'quic/test_tools/mock_random.h', 'quic/test_tools/quic_connection_peer.cc', 'quic/test_tools/quic_connection_peer.h', 'quic/test_tools/quic_framer_peer.cc', 'quic/test_tools/quic_framer_peer.h', 'quic/test_tools/quic_packet_creator_peer.cc', 'quic/test_tools/quic_packet_creator_peer.h', 'quic/test_tools/quic_session_peer.cc', 'quic/test_tools/quic_session_peer.h', 'quic/test_tools/quic_test_utils.cc', 'quic/test_tools/quic_test_utils.h', 'quic/test_tools/reliable_quic_stream_peer.cc', 'quic/test_tools/reliable_quic_stream_peer.h', 'quic/test_tools/simple_quic_framer.cc', 'quic/test_tools/simple_quic_framer.h', 'quic/test_tools/test_task_runner.cc', 
'quic/test_tools/test_task_runner.h', 'quic/quic_bandwidth_test.cc', 'quic/quic_client_session_test.cc', 'quic/quic_clock_test.cc', 'quic/quic_connection_helper_test.cc', 'quic/quic_connection_test.cc', 'quic/quic_crypto_client_stream_test.cc', 'quic/quic_crypto_server_stream_test.cc', 'quic/quic_crypto_stream_test.cc', 'quic/quic_data_writer_test.cc', 'quic/quic_fec_group_test.cc', 'quic/quic_framer_test.cc', 'quic/quic_http_stream_test.cc', 'quic/quic_network_transaction_unittest.cc', 'quic/quic_packet_creator_test.cc', 'quic/quic_packet_entropy_manager_test.cc', 'quic/quic_packet_generator_test.cc', 'quic/quic_protocol_test.cc', 'quic/quic_reliable_client_stream_test.cc', 'quic/quic_session_test.cc', 'quic/quic_stream_factory_test.cc', 'quic/quic_stream_sequencer_test.cc', 'quic/quic_time_test.cc', 'quic/quic_utils_test.cc', 'quic/reliable_quic_stream_test.cc', 'socket/buffered_write_stream_socket_unittest.cc', 'socket/client_socket_pool_base_unittest.cc', 'socket/deterministic_socket_data_unittest.cc', 'socket/mock_client_socket_pool_manager.cc', 'socket/mock_client_socket_pool_manager.h', 'socket/socks5_client_socket_unittest.cc', 'socket/socks_client_socket_pool_unittest.cc', 'socket/socks_client_socket_unittest.cc', 'socket/ssl_client_socket_openssl_unittest.cc', 'socket/ssl_client_socket_pool_unittest.cc', 'socket/ssl_client_socket_unittest.cc', 'socket/ssl_server_socket_unittest.cc', 'socket/tcp_client_socket_unittest.cc', 'socket/tcp_listen_socket_unittest.cc', 'socket/tcp_listen_socket_unittest.h', 'socket/tcp_server_socket_unittest.cc', 'socket/transport_client_socket_pool_unittest.cc', 'socket/transport_client_socket_unittest.cc', 'socket/unix_domain_socket_posix_unittest.cc', 'socket_stream/socket_stream_metrics_unittest.cc', 'socket_stream/socket_stream_unittest.cc', 'spdy/buffered_spdy_framer_spdy3_unittest.cc', 'spdy/buffered_spdy_framer_spdy2_unittest.cc', 'spdy/spdy_credential_builder_unittest.cc', 'spdy/spdy_credential_state_unittest.cc', 'spdy/spdy_frame_builder_test.cc', 'spdy/spdy_frame_reader_test.cc', 'spdy/spdy_framer_test.cc', 'spdy/spdy_header_block_unittest.cc', 'spdy/spdy_http_stream_spdy3_unittest.cc', 'spdy/spdy_http_stream_spdy2_unittest.cc', 'spdy/spdy_http_utils_unittest.cc', 'spdy/spdy_network_transaction_spdy3_unittest.cc', 'spdy/spdy_network_transaction_spdy2_unittest.cc', 'spdy/spdy_priority_forest_test.cc', 'spdy/spdy_protocol_test.cc', 'spdy/spdy_proxy_client_socket_spdy3_unittest.cc', 'spdy/spdy_proxy_client_socket_spdy2_unittest.cc', 'spdy/spdy_session_spdy3_unittest.cc', 'spdy/spdy_session_spdy2_unittest.cc', 'spdy/spdy_stream_spdy3_unittest.cc', 'spdy/spdy_stream_spdy2_unittest.cc', 'spdy/spdy_stream_test_util.cc', 'spdy/spdy_stream_test_util.h', 'spdy/spdy_test_util_common.cc', 'spdy/spdy_test_util_common.h', 'spdy/spdy_test_util_spdy3.cc', 'spdy/spdy_test_util_spdy3.h', 'spdy/spdy_test_util_spdy2.cc', 'spdy/spdy_test_util_spdy2.h', 'spdy/spdy_test_utils.cc', 'spdy/spdy_test_utils.h', 'spdy/spdy_websocket_stream_spdy2_unittest.cc', 'spdy/spdy_websocket_stream_spdy3_unittest.cc', 'spdy/spdy_websocket_test_util_spdy2.cc', 'spdy/spdy_websocket_test_util_spdy2.h', 'spdy/spdy_websocket_test_util_spdy3.cc', 'spdy/spdy_websocket_test_util_spdy3.h', 'ssl/client_cert_store_impl_unittest.cc', 'ssl/default_server_bound_cert_store_unittest.cc', 'ssl/openssl_client_key_store_unittest.cc', 'ssl/server_bound_cert_service_unittest.cc', 'ssl/ssl_cipher_suite_names_unittest.cc', 'ssl/ssl_client_auth_cache_unittest.cc', 'ssl/ssl_config_service_unittest.cc', 
'test/python_utils_unittest.cc', 'test/run_all_unittests.cc', 'test/test_certificate_data.h', 'tools/dump_cache/url_to_filename_encoder.cc', 'tools/dump_cache/url_to_filename_encoder.h', 'tools/dump_cache/url_to_filename_encoder_unittest.cc', 'tools/dump_cache/url_utilities.h', 'tools/dump_cache/url_utilities.cc', 'tools/dump_cache/url_utilities_unittest.cc', 'udp/udp_socket_unittest.cc', 'url_request/url_fetcher_impl_unittest.cc', 'url_request/url_request_context_builder_unittest.cc', 'url_request/url_request_filter_unittest.cc', 'url_request/url_request_ftp_job_unittest.cc', 'url_request/url_request_http_job_unittest.cc', 'url_request/url_request_job_factory_impl_unittest.cc', 'url_request/url_request_job_unittest.cc', 'url_request/url_request_throttler_simulation_unittest.cc', 'url_request/url_request_throttler_test_support.cc', 'url_request/url_request_throttler_test_support.h', 'url_request/url_request_throttler_unittest.cc', 'url_request/url_request_unittest.cc', 'url_request/view_cache_helper_unittest.cc', 'websockets/websocket_errors_unittest.cc', 'websockets/websocket_frame_parser_unittest.cc', 'websockets/websocket_frame_unittest.cc', 'websockets/websocket_handshake_handler_unittest.cc', 'websockets/websocket_handshake_handler_spdy2_unittest.cc', 'websockets/websocket_handshake_handler_spdy3_unittest.cc', 'websockets/websocket_job_spdy2_unittest.cc', 'websockets/websocket_job_spdy3_unittest.cc', 'websockets/websocket_net_log_params_unittest.cc', 'websockets/websocket_throttle_unittest.cc', ], 'conditions': [ ['chromeos==1', { 'sources!': [ 'base/network_change_notifier_linux_unittest.cc', 'proxy/proxy_config_service_linux_unittest.cc', ], }], [ 'OS == "android"', { 'sources!': [ # No res_ninit() et al on Android, so this doesn't make a lot of # sense. 'dns/dns_config_service_posix_unittest.cc', 'ssl/client_cert_store_impl_unittest.cc', ], 'dependencies': [ 'net_javatests', 'net_test_jni_headers', ], }], [ 'use_glib == 1', { 'dependencies': [ '../build/linux/system.gyp:ssl', ], }, { # else use_glib == 0: !posix || mac 'sources!': [ 'cert/nss_cert_database_unittest.cc', ], }, ], [ 'toolkit_uses_gtk == 1', { 'dependencies': [ '../build/linux/system.gyp:gtk', ], }, ], [ 'os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', { 'conditions': [ ['linux_use_tcmalloc==1', { 'dependencies': [ '../base/allocator/allocator.gyp:allocator', ], }], ], }], [ 'use_kerberos==1', { 'defines': [ 'USE_KERBEROS', ], }, { # use_kerberos == 0 'sources!': [ 'http/http_auth_gssapi_posix_unittest.cc', 'http/http_auth_handler_negotiate_unittest.cc', 'http/mock_gssapi_library_posix.cc', 'http/mock_gssapi_library_posix.h', ], }], [ 'use_openssl==1', { # When building for OpenSSL, we need to exclude NSS specific tests. # TODO(bulach): Add equivalent tests when the underlying # functionality is ported to OpenSSL. 
'sources!': [ 'cert/nss_cert_database_unittest.cc', 'cert/x509_util_nss_unittest.cc', 'ssl/client_cert_store_impl_unittest.cc', ], }, { # else !use_openssl: remove the unneeded files 'sources!': [ 'cert/x509_util_openssl_unittest.cc', 'socket/ssl_client_socket_openssl_unittest.cc', 'ssl/openssl_client_key_store_unittest.cc', ], }, ], [ 'enable_websockets != 1', { 'sources/': [ ['exclude', '^socket_stream/'], ['exclude', '^websockets/'], ['exclude', '^spdy/spdy_websocket_stream_spdy._unittest\\.cc$'], ], }], [ 'disable_ftp_support==1', { 'sources/': [ ['exclude', '^ftp/'], ], 'sources!': [ 'url_request/url_request_ftp_job_unittest.cc', ], }, ], [ 'enable_built_in_dns!=1', { 'sources!': [ 'dns/address_sorter_posix_unittest.cc', 'dns/address_sorter_unittest.cc', ], }, ], [ 'use_v8_in_net==1', { 'dependencies': [ 'net_with_v8', ], }, { # else: !use_v8_in_net 'sources!': [ 'proxy/proxy_resolver_v8_unittest.cc', 'proxy/proxy_resolver_v8_tracing_unittest.cc', ], }, ], [ 'OS == "win"', { 'sources!': [ 'dns/dns_config_service_posix_unittest.cc', 'http/http_auth_gssapi_posix_unittest.cc', ], # This is needed to trigger the dll copy step on windows. # TODO(mark): Specifying this here shouldn't be necessary. 'dependencies': [ '../third_party/icu/icu.gyp:icudata', '../third_party/nss/nss.gyp:nspr', '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, ], [ 'OS == "mac"', { 'dependencies': [ '../third_party/nss/nss.gyp:nspr', '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', ], }, ], [ 'OS == "ios"', { 'dependencies': [ '../third_party/nss/nss.gyp:nss', ], 'actions': [ { 'action_name': 'copy_test_data', 'variables': { 'test_data_files': [ 'data/ssl/certificates/', 'data/url_request_unittest/', ], 'test_data_prefix': 'net', }, 'includes': [ '../build/copy_test_data_ios.gypi' ], }, ], 'sources!': [ # TODO(droger): The following tests are disabled because the # implementation is missing or incomplete. # KeygenHandler::GenKeyAndSignChallenge() is not ported to iOS. 'base/keygen_handler_unittest.cc', # Need to read input data files. 'base/gzip_filter_unittest.cc', 'disk_cache/backend_unittest.cc', 'disk_cache/block_files_unittest.cc', 'socket/ssl_server_socket_unittest.cc', # Need TestServer. 'proxy/proxy_script_fetcher_impl_unittest.cc', 'socket/ssl_client_socket_unittest.cc', 'ssl/client_cert_store_impl_unittest.cc', 'url_request/url_fetcher_impl_unittest.cc', 'url_request/url_request_context_builder_unittest.cc', # Needs GetAppOutput(). 'test/python_utils_unittest.cc', # The following tests are disabled because they don't apply to # iOS. # OS is not "linux" or "freebsd" or "openbsd". 
'socket/unix_domain_socket_posix_unittest.cc', ], 'conditions': [ ['coverage != 0', { 'sources!': [ # These sources can't be built with coverage due to a # toolchain bug: http://openradar.appspot.com/radar?id=1499403 'http/transport_security_state_unittest.cc', # These tests crash when run with coverage turned on due to an # issue with llvm_gcda_increment_indirect_counter: # http://crbug.com/156058 'cookies/cookie_monster_unittest.cc', 'cookies/cookie_store_unittest.h', 'http/http_auth_controller_unittest.cc', 'http/http_network_layer_unittest.cc', 'http/http_network_transaction_spdy2_unittest.cc', 'http/http_network_transaction_spdy3_unittest.cc', 'spdy/spdy_http_stream_spdy2_unittest.cc', 'spdy/spdy_http_stream_spdy3_unittest.cc', 'spdy/spdy_proxy_client_socket_spdy3_unittest.cc', 'spdy/spdy_session_spdy3_unittest.cc', # These tests crash when run with coverage turned on: # http://crbug.com/177203 'proxy/proxy_service_unittest.cc', ], }], ], }], [ 'OS == "linux"', { 'dependencies': [ '../build/linux/system.gyp:dbus', '../dbus/dbus.gyp:dbus_test_support', ], }, ], [ 'OS == "android"', { 'dependencies': [ '../third_party/openssl/openssl.gyp:openssl', ], 'sources!': [ 'dns/dns_config_service_posix_unittest.cc', ], }, ], ['OS == "android" and gtest_target_type == "shared_library"', { 'dependencies': [ '../testing/android/native_test.gyp:native_test_native_code', ] }], [ 'OS != "win" and OS != "mac"', { 'sources!': [ 'cert/x509_cert_types_unittest.cc', ], }], ], }, { 'target_name': 'net_perftests', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../base/base.gyp:test_support_perf', '../build/temp_gyp/googleurl.gyp:googleurl', '../testing/gtest.gyp:gtest', 'net', 'net_test_support', ], 'sources': [ 'cookies/cookie_monster_perftest.cc', 'disk_cache/disk_cache_perftest.cc', 'proxy/proxy_resolver_perftest.cc', ], 'conditions': [ [ 'use_v8_in_net==1', { 'dependencies': [ 'net_with_v8', ], }, { # else: !use_v8_in_net 'sources!': [ 'proxy/proxy_resolver_perftest.cc', ], }, ], # This is needed to trigger the dll copy step on windows. # TODO(mark): Specifying this here shouldn't be necessary. [ 'OS == "win"', { 'dependencies': [ '../third_party/icu/icu.gyp:icudata', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, ], ], }, { 'target_name': 'net_test_support', 'type': 'static_library', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:test_support_base', '../build/temp_gyp/googleurl.gyp:googleurl', '../testing/gtest.gyp:gtest', 'net', ], 'export_dependent_settings': [ '../base/base.gyp:base', '../base/base.gyp:test_support_base', '../testing/gtest.gyp:gtest', ], 'sources': [ 'base/capturing_net_log.cc', 'base/capturing_net_log.h', 'base/load_timing_info_test_util.cc', 'base/load_timing_info_test_util.h', 'base/mock_file_stream.cc', 'base/mock_file_stream.h', 'base/test_completion_callback.cc', 'base/test_completion_callback.h', 'base/test_data_directory.cc', 'base/test_data_directory.h', 'cert/mock_cert_verifier.cc', 'cert/mock_cert_verifier.h', 'cookies/cookie_monster_store_test.cc', 'cookies/cookie_monster_store_test.h', 'cookies/cookie_store_test_callbacks.cc', 'cookies/cookie_store_test_callbacks.h', 'cookies/cookie_store_test_helpers.cc', 'cookies/cookie_store_test_helpers.h', 'disk_cache/disk_cache_test_base.cc', 'disk_cache/disk_cache_test_base.h', 'disk_cache/disk_cache_test_util.cc', 'disk_cache/disk_cache_test_util.h', 'disk_cache/flash/flash_cache_test_base.h', 'disk_cache/flash/flash_cache_test_base.cc', 'dns/dns_test_util.cc', 'dns/dns_test_util.h', 'dns/mock_host_resolver.cc', 'dns/mock_host_resolver.h', 'proxy/mock_proxy_resolver.cc', 'proxy/mock_proxy_resolver.h', 'proxy/mock_proxy_script_fetcher.cc', 'proxy/mock_proxy_script_fetcher.h', 'proxy/proxy_config_service_common_unittest.cc', 'proxy/proxy_config_service_common_unittest.h', 'socket/socket_test_util.cc', 'socket/socket_test_util.h', 'test/base_test_server.cc', 'test/base_test_server.h', 'test/cert_test_util.cc', 'test/cert_test_util.h', 'test/local_test_server_posix.cc', 'test/local_test_server_win.cc', 'test/local_test_server.cc', 'test/local_test_server.h', 'test/net_test_suite.cc', 'test/net_test_suite.h', 'test/python_utils.cc', 'test/python_utils.h', 'test/remote_test_server.cc', 'test/remote_test_server.h', 'test/spawner_communicator.cc', 'test/spawner_communicator.h', 'test/test_server.h', 'url_request/test_url_fetcher_factory.cc', 'url_request/test_url_fetcher_factory.h', 'url_request/url_request_test_util.cc', 'url_request/url_request_test_util.h', ], 'conditions': [ ['inside_chromium_build==1 and OS != "ios"', { 'dependencies': [ '../third_party/protobuf/protobuf.gyp:py_proto', ], }], ['os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', { 'conditions': [ ['use_openssl==1', { 'dependencies': [ '../third_party/openssl/openssl.gyp:openssl', ], }, { 'dependencies': [ '../build/linux/system.gyp:ssl', ], }], ], }], ['os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', { 'conditions': [ ['linux_use_tcmalloc==1', { 'dependencies': [ '../base/allocator/allocator.gyp:allocator', ], }], ], }], ['OS != "android"', { 'sources!': [ 'test/remote_test_server.cc', 'test/remote_test_server.h', 'test/spawner_communicator.cc', 'test/spawner_communicator.h', ], }], ['OS == "ios"', { 'dependencies': [ '../third_party/nss/nss.gyp:nss', ], }], [ 'use_v8_in_net==1', { 'dependencies': [ 'net_with_v8', ], }, ], ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'net_resources', 'type': 'none', 'variables': { 'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/net', }, 'actions': [ { 'action_name': 'net_resources', 'variables': { 'grit_grd_file': 'base/net_resources.grd', }, 'includes': [ '../build/grit_action.gypi' ], }, ], 'includes': [ '../build/grit_target.gypi' ], }, { 'target_name': 'http_server', 'type': 'static_library', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'server/http_connection.cc', 'server/http_connection.h', 'server/http_server.cc', 'server/http_server.h', 'server/http_server_request_info.cc', 'server/http_server_request_info.h', 'server/web_socket.cc', 'server/web_socket.h', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'dump_cache', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', 'net_test_support', ], 'sources': [ 'tools/dump_cache/cache_dumper.cc', 'tools/dump_cache/cache_dumper.h', 'tools/dump_cache/dump_cache.cc', 'tools/dump_cache/dump_files.cc', 'tools/dump_cache/dump_files.h', 'tools/dump_cache/simple_cache_dumper.cc', 'tools/dump_cache/simple_cache_dumper.h', 'tools/dump_cache/upgrade_win.cc', 'tools/dump_cache/upgrade_win.h', 'tools/dump_cache/url_to_filename_encoder.cc', 'tools/dump_cache/url_to_filename_encoder.h', 'tools/dump_cache/url_utilities.h', 'tools/dump_cache/url_utilities.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, ], 'conditions': [ ['use_v8_in_net == 1', { 'targets': [ { 'target_name': 'net_with_v8', 'type': '<(component)', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', '../build/temp_gyp/googleurl.gyp:googleurl', '../v8/tools/gyp/v8.gyp:v8', 'net' ], 'defines': [ 'NET_IMPLEMENTATION', ], 'sources': [ 'proxy/proxy_resolver_v8.cc', 'proxy/proxy_resolver_v8.h', 'proxy/proxy_resolver_v8_tracing.cc', 'proxy/proxy_resolver_v8_tracing.h', 'proxy/proxy_service_v8.cc', 'proxy/proxy_service_v8.h', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, ], }], ['OS != "ios"', { 'targets': [ # iOS doesn't have the concept of simple executables, these targets # can't be compiled on the platform. { 'target_name': 'crash_cache', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', 'net_test_support', ], 'sources': [ 'tools/crash_cache/crash_cache.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'crl_set_dump', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'tools/crl_set_dump/crl_set_dump.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'dns_fuzz_stub', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'tools/dns_fuzz_stub/dns_fuzz_stub.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'fetch_client', 'type': 'executable', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations', '../build/temp_gyp/googleurl.gyp:googleurl', '../testing/gtest.gyp:gtest', 'net', 'net_with_v8', ], 'sources': [ 'tools/fetch/fetch_client.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'fetch_server', 'type': 'executable', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', '../build/temp_gyp/googleurl.gyp:googleurl', 'net', ], 'sources': [ 'tools/fetch/fetch_server.cc', 'tools/fetch/http_listen_socket.cc', 'tools/fetch/http_listen_socket.h', 'tools/fetch/http_server.cc', 'tools/fetch/http_server.h', 'tools/fetch/http_server_request_info.cc', 'tools/fetch/http_server_request_info.h', 'tools/fetch/http_server_response_info.cc', 'tools/fetch/http_server_response_info.h', 'tools/fetch/http_session.cc', 'tools/fetch/http_session.h', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'gdig', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'tools/gdig/file_net_log.cc', 'tools/gdig/gdig.cc', ], }, { 'target_name': 'get_server_time', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../build/temp_gyp/googleurl.gyp:googleurl', 'net', ], 'sources': [ 'tools/get_server_time/get_server_time.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'net_watcher', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', 'net_with_v8', ], 'conditions': [ [ 'use_glib == 1', { 'dependencies': [ '../build/linux/system.gyp:gconf', '../build/linux/system.gyp:gio', ], }, ], ], 'sources': [ 'tools/net_watcher/net_watcher.cc', ], }, { 'target_name': 'run_testserver', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:test_support_base', '../testing/gtest.gyp:gtest', 'net_test_support', ], 'sources': [ 'tools/testserver/run_testserver.cc', ], }, { 'target_name': 'stress_cache', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', 'net_test_support', ], 'sources': [ 'disk_cache/stress_cache.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'tld_cleanup', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../build/temp_gyp/googleurl.gyp:googleurl', ], 'sources': [ 'tools/tld_cleanup/tld_cleanup.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, ], }], ['os_posix == 1 and OS != "mac" and OS != "ios" and OS != "android"', { 'targets': [ { 'target_name': 'flip_balsa_and_epoll_library', 'type': 'static_library', 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'tools/flip_server/balsa_enums.h', 'tools/flip_server/balsa_frame.cc', 'tools/flip_server/balsa_frame.h', 'tools/flip_server/balsa_headers.cc', 'tools/flip_server/balsa_headers.h', 'tools/flip_server/balsa_headers_token_utils.cc', 'tools/flip_server/balsa_headers_token_utils.h', 'tools/flip_server/balsa_visitor_interface.h', 'tools/flip_server/constants.h', 'tools/flip_server/epoll_server.cc', 'tools/flip_server/epoll_server.h', 'tools/flip_server/http_message_constants.cc', 'tools/flip_server/http_message_constants.h', 'tools/flip_server/split.h', 'tools/flip_server/split.cc', ], }, { 'target_name': 'flip_in_mem_edsm_server', 'type': 'executable', 'cflags': [ '-Wno-deprecated', ], 'dependencies': [ '../base/base.gyp:base', '../third_party/openssl/openssl.gyp:openssl', 'flip_balsa_and_epoll_library', 'net', ], 'sources': [ 'tools/dump_cache/url_to_filename_encoder.cc', 'tools/dump_cache/url_to_filename_encoder.h', 'tools/dump_cache/url_utilities.h', 'tools/dump_cache/url_utilities.cc', 'tools/flip_server/acceptor_thread.h', 'tools/flip_server/acceptor_thread.cc', 'tools/flip_server/buffer_interface.h', 'tools/flip_server/create_listener.cc', 'tools/flip_server/create_listener.h', 'tools/flip_server/flip_config.cc', 'tools/flip_server/flip_config.h', 'tools/flip_server/flip_in_mem_edsm_server.cc', 'tools/flip_server/http_interface.cc', 'tools/flip_server/http_interface.h', 'tools/flip_server/loadtime_measurement.h', 'tools/flip_server/mem_cache.h', 'tools/flip_server/mem_cache.cc', 'tools/flip_server/output_ordering.cc', 'tools/flip_server/output_ordering.h', 'tools/flip_server/ring_buffer.cc', 'tools/flip_server/ring_buffer.h', 'tools/flip_server/simple_buffer.cc', 'tools/flip_server/simple_buffer.h', 'tools/flip_server/sm_connection.cc', 'tools/flip_server/sm_connection.h', 'tools/flip_server/sm_interface.h', 'tools/flip_server/spdy_ssl.cc', 'tools/flip_server/spdy_ssl.h', 'tools/flip_server/spdy_interface.cc', 'tools/flip_server/spdy_interface.h', 'tools/flip_server/spdy_util.cc', 'tools/flip_server/spdy_util.h', 'tools/flip_server/streamer_interface.cc', 'tools/flip_server/streamer_interface.h', 'tools/flip_server/string_piece_utils.h', ], }, { 'target_name': 'quic_library', 'type': 'static_library', 'dependencies': [ '../base/base.gyp:base', '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations', '../build/temp_gyp/googleurl.gyp:googleurl', '../third_party/openssl/openssl.gyp:openssl', 'flip_balsa_and_epoll_library', 'net', ], 'sources': [ 'tools/quic/quic_client.cc', 'tools/quic/quic_client.h', 'tools/quic/quic_client_session.cc', 'tools/quic/quic_client_session.h', 'tools/quic/quic_dispatcher.h', 'tools/quic/quic_dispatcher.cc', 'tools/quic/quic_epoll_clock.cc', 'tools/quic/quic_epoll_clock.h', 'tools/quic/quic_epoll_connection_helper.cc', 'tools/quic/quic_epoll_connection_helper.h', 'tools/quic/quic_in_memory_cache.cc', 'tools/quic/quic_in_memory_cache.h', 'tools/quic/quic_packet_writer.h', 'tools/quic/quic_reliable_client_stream.cc', 'tools/quic/quic_reliable_client_stream.h', 'tools/quic/quic_reliable_server_stream.cc', 'tools/quic/quic_reliable_server_stream.h', 'tools/quic/quic_server.cc', 'tools/quic/quic_server.h', 'tools/quic/quic_server_session.cc', 
'tools/quic/quic_server_session.h', 'tools/quic/quic_socket_utils.cc', 'tools/quic/quic_socket_utils.h', 'tools/quic/quic_spdy_client_stream.cc', 'tools/quic/quic_spdy_client_stream.h', 'tools/quic/quic_spdy_server_stream.cc', 'tools/quic/quic_spdy_server_stream.h', 'tools/quic/quic_time_wait_list_manager.h', 'tools/quic/quic_time_wait_list_manager.cc', 'tools/quic/spdy_utils.cc', 'tools/quic/spdy_utils.h', ], }, { 'target_name': 'quic_client', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../third_party/openssl/openssl.gyp:openssl', 'net', 'quic_library', ], 'sources': [ 'tools/quic/quic_client_bin.cc', ], }, { 'target_name': 'quic_server', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../third_party/openssl/openssl.gyp:openssl', 'net', 'quic_library', ], 'sources': [ 'tools/quic/quic_server_bin.cc', ], }, { 'target_name': 'quic_unittests', 'type': '<(gtest_target_type)', 'dependencies': [ '../base/base.gyp:test_support_base', '../testing/gmock.gyp:gmock', '../testing/gtest.gyp:gtest', 'net', 'quic_library', ], 'sources': [ 'quic/test_tools/quic_session_peer.cc', 'quic/test_tools/quic_session_peer.h', 'quic/test_tools/crypto_test_utils.cc', 'quic/test_tools/crypto_test_utils.h', 'quic/test_tools/mock_clock.cc', 'quic/test_tools/mock_clock.h', 'quic/test_tools/mock_random.cc', 'quic/test_tools/mock_random.h', 'quic/test_tools/simple_quic_framer.cc', 'quic/test_tools/simple_quic_framer.h', 'quic/test_tools/quic_connection_peer.cc', 'quic/test_tools/quic_connection_peer.h', 'quic/test_tools/quic_framer_peer.cc', 'quic/test_tools/quic_framer_peer.h', 'quic/test_tools/quic_session_peer.cc', 'quic/test_tools/quic_session_peer.h', 'quic/test_tools/quic_test_utils.cc', 'quic/test_tools/quic_test_utils.h', 'quic/test_tools/reliable_quic_stream_peer.cc', 'quic/test_tools/reliable_quic_stream_peer.h', 'tools/flip_server/simple_buffer.cc', 'tools/flip_server/simple_buffer.h', 'tools/quic/end_to_end_test.cc', 'tools/quic/quic_client_session_test.cc', 'tools/quic/quic_dispatcher_test.cc', 'tools/quic/quic_epoll_clock_test.cc', 'tools/quic/quic_epoll_connection_helper_test.cc', 'tools/quic/quic_reliable_client_stream_test.cc', 'tools/quic/quic_reliable_server_stream_test.cc', 'tools/quic/test_tools/http_message_test_utils.cc', 'tools/quic/test_tools/http_message_test_utils.h', 'tools/quic/test_tools/mock_epoll_server.cc', 'tools/quic/test_tools/mock_epoll_server.h', 'tools/quic/test_tools/quic_test_client.cc', 'tools/quic/test_tools/quic_test_client.h', 'tools/quic/test_tools/quic_test_utils.cc', 'tools/quic/test_tools/quic_test_utils.h', 'tools/quic/test_tools/run_all_unittests.cc', ], } ] }], ['OS=="android"', { 'targets': [ { 'target_name': 'net_jni_headers', 'type': 'none', 'sources': [ 'android/java/src/org/chromium/net/AndroidKeyStore.java', 'android/java/src/org/chromium/net/AndroidNetworkLibrary.java', 'android/java/src/org/chromium/net/GURLUtils.java', 'android/java/src/org/chromium/net/NetworkChangeNotifier.java', 'android/java/src/org/chromium/net/ProxyChangeListener.java', ], 'variables': { 'jni_gen_package': 'net', }, 'direct_dependent_settings': { 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)/net', ], }, 'includes': [ '../build/jni_generator.gypi' ], }, { 'target_name': 'net_test_jni_headers', 'type': 'none', 'sources': [ 'android/javatests/src/org/chromium/net/AndroidKeyStoreTestUtil.java', ], 'variables': { 'jni_gen_package': 'net', }, 'direct_dependent_settings': { 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)/net', ], }, 'includes': [ 
'../build/jni_generator.gypi' ], }, { 'target_name': 'net_java', 'type': 'none', 'variables': { 'java_in_dir': '../net/android/java', }, 'dependencies': [ '../base/base.gyp:base', 'cert_verify_result_android_java', 'certificate_mime_types_java', 'net_errors_java', 'private_key_types_java', ], 'includes': [ '../build/java.gypi' ], }, { 'target_name': 'net_java_test_support', 'type': 'none', 'variables': { 'java_in_dir': '../net/test/android/javatests', }, 'includes': [ '../build/java.gypi' ], }, { 'target_name': 'net_javatests', 'type': 'none', 'variables': { 'java_in_dir': '../net/android/javatests', }, 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_java_test_support', 'net_java', ], 'includes': [ '../build/java.gypi' ], }, { 'target_name': 'net_errors_java', 'type': 'none', 'sources': [ 'android/java/NetError.template', ], 'variables': { 'package_name': 'org/chromium/net', 'template_deps': ['base/net_error_list.h'], }, 'includes': [ '../build/android/java_cpp_template.gypi' ], }, { 'target_name': 'certificate_mime_types_java', 'type': 'none', 'sources': [ 'android/java/CertificateMimeType.template', ], 'variables': { 'package_name': 'org/chromium/net', 'template_deps': ['base/mime_util_certificate_type_list.h'], }, 'includes': [ '../build/android/java_cpp_template.gypi' ], }, { 'target_name': 'cert_verify_result_android_java', 'type': 'none', 'sources': [ 'android/java/CertVerifyResultAndroid.template', ], 'variables': { 'package_name': 'org/chromium/net', 'template_deps': ['android/cert_verify_result_android_list.h'], }, 'includes': [ '../build/android/java_cpp_template.gypi' ], }, { 'target_name': 'private_key_types_java', 'type': 'none', 'sources': [ 'android/java/PrivateKeyType.template', ], 'variables': { 'package_name': 'org/chromium/net', 'template_deps': ['android/private_key_type_list.h'], }, 'includes': [ '../build/android/java_cpp_template.gypi' ], }, ], }], # Special target to wrap a gtest_target_type==shared_library # net_unittests into an android apk for execution. # See base.gyp for TODO(jrg)s about this strategy. ['OS == "android" and gtest_target_type == "shared_library"', { 'targets': [ { 'target_name': 'net_unittests_apk', 'type': 'none', 'dependencies': [ 'net_java', 'net_javatests', 'net_unittests', ], 'variables': { 'test_suite_name': 'net_unittests', 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)net_unittests<(SHARED_LIB_SUFFIX)', }, 'includes': [ '../build/apk_test.gypi' ], }, ], }], ['test_isolation_mode != "noop"', { 'targets': [ { 'target_name': 'net_unittests_run', 'type': 'none', 'dependencies': [ 'net_unittests', ], 'includes': [ 'net_unittests.isolate', ], 'actions': [ { 'action_name': 'isolate', 'inputs': [ 'net_unittests.isolate', '<@(isolate_dependency_tracked)', ], 'outputs': [ '<(PRODUCT_DIR)/net_unittests.isolated', ], 'action': [ 'python', '../tools/swarm_client/isolate.py', '<(test_isolation_mode)', '--outdir', '<(test_isolation_outdir)', '--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)', '--variable', 'OS', '<(OS)', '--result', '<@(_outputs)', '--isolate', 'net_unittests.isolate', ], }, ], }, ], }], ], }
1.09375
1
wrappers/python/virgil_crypto_lib/foundation/kdf1.py
odidev/virgil-crypto-c
26
2936
# Copyright (C) 2015-2021 Virgil Security, Inc.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# (1) Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# (3) Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Lead Maintainer: <NAME> Inc. <<EMAIL>>

from ctypes import *

from ._c_bridge import VscfKdf1
from ._c_bridge import VscfImplTag
from ._c_bridge import VscfStatus
from virgil_crypto_lib.common._c_bridge import Data
from virgil_crypto_lib.common._c_bridge import Buffer
from .alg import Alg
from .kdf import Kdf


class Kdf1(Alg, Kdf):
    """Virgil Security implementation of the KDF1 (ISO-18033-2) algorithm."""

    def __init__(self):
        """Create underlying C context."""
        self._lib_vscf_kdf1 = VscfKdf1()
        self._c_impl = None
        self._ctx = None
        self.ctx = self._lib_vscf_kdf1.vscf_kdf1_new()

    def __delete__(self, instance):
        """Destroy underlying C context."""
        self._lib_vscf_kdf1.vscf_kdf1_delete(self.ctx)

    def set_hash(self, hash):
        self._lib_vscf_kdf1.vscf_kdf1_use_hash(self.ctx, hash.c_impl)

    def alg_id(self):
        """Provide algorithm identifier."""
        result = self._lib_vscf_kdf1.vscf_kdf1_alg_id(self.ctx)
        return result

    def produce_alg_info(self):
        """Produce object with algorithm information and configuration parameters."""
        result = self._lib_vscf_kdf1.vscf_kdf1_produce_alg_info(self.ctx)
        instance = VscfImplTag.get_type(result)[0].take_c_ctx(cast(result, POINTER(VscfImplTag.get_type(result)[1])))
        return instance

    def restore_alg_info(self, alg_info):
        """Restore algorithm configuration from the given object."""
        status = self._lib_vscf_kdf1.vscf_kdf1_restore_alg_info(self.ctx, alg_info.c_impl)
        VscfStatus.handle_status(status)

    def derive(self, data, key_len):
        """Derive key of the requested length from the given data."""
        d_data = Data(data)
        key = Buffer(key_len)
        self._lib_vscf_kdf1.vscf_kdf1_derive(self.ctx, d_data.data, key_len, key.c_buffer)
        return key.get_bytes()

    @classmethod
    def take_c_ctx(cls, c_ctx):
        inst = cls.__new__(cls)
        inst._lib_vscf_kdf1 = VscfKdf1()
        inst.ctx = c_ctx
        return inst

    @classmethod
    def use_c_ctx(cls, c_ctx):
        inst = cls.__new__(cls)
        inst._lib_vscf_kdf1 = VscfKdf1()
        inst.ctx = inst._lib_vscf_kdf1.vscf_kdf1_shallow_copy(c_ctx)
        return inst

    @property
    def c_impl(self):
        return self._c_impl

    @property
    def ctx(self):
        return self._ctx

    @ctx.setter
    def ctx(self, value):
        self._ctx = self._lib_vscf_kdf1.vscf_kdf1_shallow_copy(value)
        self._c_impl = self._lib_vscf_kdf1.vscf_kdf1_impl(self.ctx)
1.414063
1
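A minimal usage sketch for the Kdf1 wrapper above. It assumes the same package also exposes a Sha256 hash wrapper (virgil_crypto_lib.foundation.Sha256); that import and the input values are illustrative assumptions, not taken from the record itself.

# Sketch only: derive a 32-byte key from input material with KDF1.
# Assumes virgil_crypto_lib.foundation.Sha256 exists (hypothetical import).
from virgil_crypto_lib.foundation import Kdf1, Sha256

kdf = Kdf1()
kdf.set_hash(Sha256())                            # KDF1 needs an underlying hash
key = kdf.derive(b"shared secret material", 32)   # returns the raw derived bytes
assert len(key) == 32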
venv/lib/python3.9/site-packages/py2app/bootstrap/disable_linecache.py
dequeb/asmbattle
193
2944
def _disable_linecache():
    import linecache

    def fake_getline(*args, **kwargs):
        return ""

    linecache.orig_getline = linecache.getline
    linecache.getline = fake_getline


_disable_linecache()
0.746094
1
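A short sketch of the effect of the py2app bootstrap snippet above: after it runs, linecache.getline always returns an empty string (source files are not available inside a frozen app), while the original lookup stays reachable through the orig_getline attribute it saved. This is an illustration of the monkey-patch, not part of the bootstrap.

import linecache

# Once _disable_linecache() has executed in this process:
print(repr(linecache.getline(__file__, 1)))       # -> '' (patched fake_getline)
print(repr(linecache.orig_getline(__file__, 1)))  # real lookup is still available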
blog/views.py
kbilak/City-portal
0
2952
from django.shortcuts import render
from django.views.generic import TemplateView


def index(request):
    return render(request, 'index.html')
0.816406
1
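For context, a hedged sketch of how such an index view is typically wired into a URL configuration; the urls.py path and route name are assumptions, since the record above only contains the view itself.

# blog/urls.py (illustrative, not part of the record above)
from django.urls import path

from . import views

urlpatterns = [
    path("", views.index, name="index"),  # serves index.html at the app root
]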
lib/bridgedb/email/request.py
liudonghua123/bridgedb
0
2976
# -*- coding: utf-8; test-case-name: bridgedb.test.test_email_request; -*- #_____________________________________________________________________________ # # This file is part of BridgeDB, a Tor bridge distribution system. # # :authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> 0xA3ADB67A2CDB8B35 # <NAME> <<EMAIL>> # please also see AUTHORS file # :copyright: (c) 2007-2015, The Tor Project, Inc. # (c) 2013-2015, Isis Lovecruft # :license: see LICENSE for licensing information #_____________________________________________________________________________ """ .. py:module:: bridgedb.email.request :synopsis: Classes for parsing and storing information about requests for bridges which are sent to the email distributor. bridgedb.email.request ====================== Classes for parsing and storing information about requests for bridges which are sent to the email distributor. :: bridgedb.email.request | |_ determineBridgeRequestOptions - Figure out which filters to apply, or | offer help. |_ EmailBridgeRequest - A request for bridges which was received through the email distributor. .. """ from __future__ import print_function from __future__ import unicode_literals import logging import re from bridgedb import bridgerequest from bridgedb.Dist import EmailRequestedHelp from bridgedb.Dist import EmailRequestedKey #: A regular expression for matching the Pluggable Transport method TYPE in #: emailed requests for Pluggable Transports. TRANSPORT_REGEXP = ".*transport ([a-z][_a-z0-9]*)" TRANSPORT_PATTERN = re.compile(TRANSPORT_REGEXP) #: A regular expression that matches country codes in requests for unblocked #: bridges. UNBLOCKED_REGEXP = ".*unblocked ([a-z]{2,4})" UNBLOCKED_PATTERN = re.compile(UNBLOCKED_REGEXP) def determineBridgeRequestOptions(lines): """Figure out which :class:`Bridges.BridgeFilter`s to apply, or offer help. .. note:: If any ``'transport TYPE'`` was requested, or bridges not blocked in a specific CC (``'unblocked CC'``), then the ``TYPE`` and/or ``CC`` will *always* be stored as a *lowercase* string. :param list lines: A list of lines from an email, including the headers. :raises EmailRequestedHelp: if the client requested help. :raises EmailRequestedKey: if the client requested our GnuPG key. :rtype: :class:`EmailBridgeRequest` :returns: A :class:`~bridgerequst.BridgeRequest` with all of the requested parameters set. The returned ``BridgeRequest`` will have already had its filters generated via :meth:`~EmailBridgeRequest.generateFilters`. """ request = EmailBridgeRequest() skippedHeaders = False for line in lines: line = line.strip().lower() # Ignore all lines before the first empty line: if not line: skippedHeaders = True if not skippedHeaders: continue if ("help" in line) or ("halp" in line): raise EmailRequestedHelp("Client requested help.") if "get" in line: request.isValid(True) logging.debug("Email request was valid.") if "key" in line: request.wantsKey(True) raise EmailRequestedKey("Email requested a copy of our GnuPG key.") if "ipv6" in line: request.withIPv6() if "transport" in line: request.withPluggableTransportType(line) if "unblocked" in line: request.withoutBlockInCountry(line) logging.debug("Generating hashring filters for request.") request.generateFilters() return request class EmailBridgeRequest(bridgerequest.BridgeRequestBase): """We received a request for bridges through the email distributor.""" def __init__(self): """Process a new bridge request received through the :class:`~bridgedb.Dist.EmailBasedDistributor`. 
""" super(EmailBridgeRequest, self).__init__() self._isValid = False self._wantsKey = False def isValid(self, valid=None): """Get or set the validity of this bridge request. If called without parameters, this method will return the current state, otherwise (if called with the **valid** parameter), it will set the current state of validity for this request. :param bool valid: If given, set the validity state of this request. Otherwise, get the current state. """ if valid is not None: self._isValid = bool(valid) return self._isValid def wantsKey(self, wantsKey=None): """Get or set whether this bridge request wanted our GnuPG key. If called without parameters, this method will return the current state, otherwise (if called with the **wantsKey** parameter set), it will set the current state for whether or not this request wanted our key. :param bool wantsKey: If given, set the validity state of this request. Otherwise, get the current state. """ if wantsKey is not None: self._wantsKey = bool(wantsKey) return self._wantsKey def withoutBlockInCountry(self, line): """This request was for bridges not blocked in **country**. Add any country code found in the **line** to the list of ``notBlockedIn``. Currently, a request for a transport is recognized if the email line contains the ``'unblocked'`` command. :param str country: The line from the email wherein the client requested some type of Pluggable Transport. """ unblocked = None logging.debug("Parsing 'unblocked' line: %r" % line) try: unblocked = UNBLOCKED_PATTERN.match(line).group(1) except (TypeError, AttributeError): pass if unblocked: self.notBlockedIn.append(unblocked) logging.info("Email requested bridges not blocked in: %r" % unblocked) def withPluggableTransportType(self, line): """This request included a specific Pluggable Transport identifier. Add any Pluggable Transport method TYPE found in the **line** to the list of ``transports``. Currently, a request for a transport is recognized if the email line contains the ``'transport'`` command. :param str line: The line from the email wherein the client requested some type of Pluggable Transport. """ transport = None logging.debug("Parsing 'transport' line: %r" % line) try: transport = TRANSPORT_PATTERN.match(line).group(1) except (TypeError, AttributeError): pass if transport: self.transports.append(transport) logging.info("Email requested transport type: %r" % transport)
1.601563
2
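A small sketch of how determineBridgeRequestOptions from the bridgedb module above is meant to be driven: email body lines after the first blank line are scanned, and commands such as 'get transport obfs4' or 'get unblocked ir' populate the returned request. It assumes a working bridgedb installation; the sample lines and expected outputs only illustrate the parsing behaviour described in the docstrings.

from bridgedb.email.request import determineBridgeRequestOptions

lines = [
    "From: user@example.com",   # header lines are skipped...
    "",                          # ...everything before the first blank line
    "get transport obfs4",       # 'transport TYPE' command, stored lowercase
    "get unblocked ir",          # bridges not blocked in country code 'ir'
]
request = determineBridgeRequestOptions(lines)
print(request.transports)    # expected: ['obfs4']
print(request.notBlockedIn)  # expected: ['ir']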
tools/mkcodelet.py
bobmittmann/yard-ice
2
2992
#!/usr/bin/python from struct import * from getopt import * import sys import os import re def usage(): global progname print >> sys.stderr, "" print >> sys.stderr, " Usage:", progname, "[options] fname" print >> sys.stderr, "" print >> sys.stderr, "Options" print >> sys.stderr, " -h, --help show this help message and exit" print >> sys.stderr, " -o FILENAME, --addr=FILENAME" print >> sys.stderr, "" def error(msg): print >> sys.stderr, "" print >> sys.stderr, "#error:", msg usage() sys.exit(2) def mk_codelet(in_fname, out_fname, hdr_fname): try: in_file = open(in_fname, mode='r') except: print >> sys.stderr, "#error: can't open file: '%s'" % in_fname sys.exit(1) try: c_file = open(out_fname, mode='w') except: print >> sys.stderr, "#error: can't create file: %s" % out_fname sys.exit(1) try: h_file = open(hdr_fname, mode='w') except: print >> sys.stderr, "#error: can't create file: %s" % hdr_fname sys.exit(1) i = 0 for line in in_file: if re.match("SYMBOL TABLE:", line): break s_pat = re.compile("([0-9a-f]{8}) ..*[0-9a-f]{8} ([.A-Za-z_][A-Za-z_0-9]*)") sym = {} for line in in_file: m = s_pat.findall(line) if m: addr = int(m[0][0], 16) name = m[0][1] sym[addr] = name else: break for line in in_file: if re.match("Contents of section .text:", line): break token_pat = re.compile("([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})") c_file.write("#include <stdint.h>\n\n") h_file.write("#include <stdint.h>\n\n") addr = 0 i = 0 for line in in_file: for a, b, c, d in token_pat.findall(line): try: sym[addr] if (i > 0): c_file.write("\n};\n\n") c_file.write("const uint32_t %s[] = {" % sym[addr]) h_file.write("extern const uint32_t %s[];\n\n" % sym[addr]) i = 0 except KeyError: pass if ((i % 4) == 0): if (i > 0): c_file.write(",") c_file.write("\n\t0x" + d + c + b + a) else: c_file.write(", 0x" + d + c + b + a ) i = i + 1; addr = addr + 4 c_file.write("\n};\n") in_file.close() c_file.close() h_file.close() return def main(): global progname progname = sys.argv[0] try: opts, args = getopt(sys.argv[1:], "ho:", \ ["help", "output="]) except GetoptError, err: error(str(err)) for o, a in opts: if o in ("-h", "--help"): usage() sys.exit() elif o in ("-o", "--output"): out_fname = a else: assert False, "unhandled option" if len(args) == 0: error("missing fname") if len(args) > 1: error("too many arguments") in_fname = args[0] try: out_fname except NameError: dirname, fname = os.path.split(in_fname) basename, extension = os.path.splitext(fname) out_fname = basename + '.' + 'c' dirname, fname = os.path.split(out_fname) basename, extension = os.path.splitext(fname) hdr_fname = basename + '.' + 'h' mk_codelet(in_fname, out_fname, hdr_fname) if __name__ == "__main__": main()
1.5
2
templates_deepdive_app_bagofwords/udf/dd_extract_features.py
charlieccarey/rdoc
0
3008
#!/usr/bin/env python
from __future__ import print_function
'''
1\taaaa~^~bbbb~^~cccc
2\tdddd~^~EEEE~^~ffff
'''

import sys

ARR_DELIM = '~^~'

for row in sys.stdin:
    row = row.strip()
    sent_id, lemmas = row.split('\t')
    lemmas = lemmas.split(ARR_DELIM)

    for lemma in lemmas:
        print('{}\t{}'.format(sent_id, lemma))
1.25
1
integreat_cms/api/v3/regions.py
Integreat/cms-django
21
3024
""" This module includes functions related to the regions API endpoint. """ from django.http import JsonResponse from ...cms.models import Region from ...cms.constants import region_status from ..decorators import json_response def transform_region(region): """ Function to create a JSON from a single region object, including information if region is live/active. :param region: The region object which should be converted :type region: ~integreat_cms.cms.models.regions.region.Region :return: data necessary for API :rtype: dict """ return { "id": region.id, "name": region.full_name, "path": region.slug, "live": region.status == region_status.ACTIVE, "prefix": region.prefix, "name_without_prefix": region.name, "plz": region.postal_code, "extras": region.offers.exists(), "events": region.events_enabled, "pois": region.locations_enabled, "push_notifications": region.push_notifications_enabled, "longitude": region.longitude, "latitude": region.latitude, "bounding_box": region.bounding_box.api_representation, "aliases": region.aliases, "tunews": region.tunews_enabled, } def transform_region_by_status(region): """ Function to create a JSON from a single "active" region object. :param region: The region object which should be converted :type region: ~integreat_cms.cms.models.regions.region.Region :return: data necessary for API :rtype: dict """ result = transform_region(region) # Remove status del result["live"] return result @json_response def regions(_): """ List all regions that are not archived and transform result into JSON :return: JSON object according to APIv3 regions endpoint definition :rtype: ~django.http.JsonResponse """ result = list( map(transform_region, Region.objects.exclude(status=region_status.ARCHIVED)) ) return JsonResponse( result, safe=False ) # Turn off Safe-Mode to allow serializing arrays @json_response def liveregions(_): """ List all regions that are not archived and transform result into JSON :return: JSON object according to APIv3 live regions endpoint definition :rtype: ~django.http.JsonResponse """ result = list( map( transform_region_by_status, Region.objects.filter(status=region_status.ACTIVE), ) ) return JsonResponse( result, safe=False ) # Turn off Safe-Mode to allow serializing arrays @json_response def hiddenregions(_): """ List all regions that are hidden and transform result into JSON :return: JSON object according to APIv3 hidden regions endpoint definition :rtype: ~django.http.JsonResponse """ result = list( map( transform_region_by_status, Region.objects.filter(status=region_status.HIDDEN), ) ) return JsonResponse( result, safe=False ) # Turn off Safe-Mode to allow serializing arrays
1.476563
1
pincer/objects/message/sticker.py
mjneff2/Pincer
0
3032
# Copyright Pincer 2021-Present # Full MIT License can be found in `LICENSE` at the project root. from __future__ import annotations from dataclasses import dataclass from enum import IntEnum from typing import List, Optional, TYPE_CHECKING from ...utils.api_object import APIObject from ...utils.types import MISSING if TYPE_CHECKING: from ..user import User from ...utils import APINullable, Snowflake class StickerType(IntEnum): """ Displays from where the sticker comes from. :param STANDARD: Sticker is included in the default Discord sticker pack. :param GUILD: Sticker is a custom sticker from a discord server. """ STANDARD = 1 GUILD = 2 class StickerFormatType(IntEnum): """ The type of the sticker. :param PNG: Sticker is of PNG format. :param APNG: Sticker is animated with APNG format. :param LOTTIE: Sticker is animated with with LOTTIE format. (vector based) """ PNG = 1 APNG = 2 LOTTIE = 3 @dataclass class Sticker(APIObject): """ Represents a Discord sticker. :param description: description of the sticker :param format_type: type of sticker format :param id: id of the sticker :param name: name of the sticker :param tags: for guild stickers, the Discord name of a unicode emoji representing the sticker's expression. For standard stickers, a comma-separated list of related expressions. :param type: type of sticker :param available: whether this guild sticker can be used, may be false due to loss of Server Boosts :param guild_id: id of the guild that owns this sticker :param pack_id: for standard stickers, id of the pack the sticker is from :param sort_value: the standard sticker's sort order within its pack :param user: the user that uploaded the guild sticker """ description: Optional[str] format_type: StickerFormatType id: Snowflake name: str tags: str type: StickerType available: APINullable[bool] = MISSING guild_id: APINullable[Snowflake] = MISSING pack_id: APINullable[Snowflake] = MISSING sort_value: APINullable[int] = MISSING user: APINullable[User] = MISSING @dataclass class StickerItem(APIObject): """ Represents the smallest amount of data required to render a sticker. A partial sticker object. :param id: id of the sticker :param name: name of the sticker :param format_type: type of sticker format """ id: Snowflake name: str format_type: StickerFormatType @dataclass class StickerPack(APIObject): """ Represents a pack of standard stickers. :param id: id of the sticker pack :param stickers: the stickers in the pack :param name: name of the sticker pack :param sku_id: id of the pack's SKU :param description: description of the sticker pack :param cover_sticker_id: id of a sticker in the pack which is shown as the pack's icon :param banner_asset_id: id of the sticker pack's banner image """ id: Snowflake stickers: List[Sticker] name: str sku_id: Snowflake description: str cover_sticker_id: APINullable[Snowflake] = MISSING banner_asset_id: APINullable[Snowflake] = MISSING
1.851563
2
src/cms/views/error_handler/error_handler.py
digitalfabrik/coldaid-backend
4
3040
from django.shortcuts import render from django.utils.translation import ugettext as _ # pylint: disable=unused-argument def handler400(request, exception): ctx = {'code': 400, 'title': _('Bad request'), 'message': _('There was an error in your request.')} response = render(request, 'error_handler/http_error.html', ctx) response.status_code = 400 return response # pylint: disable=unused-argument def handler403(request, exception): ctx = {'code': 403, 'title': _('Forbidden'), 'message': _("You don't have the permission to access this page.")} response = render(request, 'error_handler/http_error.html', ctx) response.status_code = 403 return response # pylint: disable=unused-argument def handler404(request, exception): ctx = {'code': 404, 'title': _('Page not found'), 'message': _('The page you requested could not be found.')} response = render(request, 'error_handler/http_error.html', ctx) response.status_code = 404 return response # pylint: disable=unused-argument def handler500(request): ctx = {'code': 500, 'title': _('Internal Server Error'), 'message': _('An unexpected error has occurred.')} response = render(request, 'error_handler/http_error.html', ctx) response.status_code = 500 return response # pylint: disable=unused-argument def csrf_failure(request, reason): return render(request, 'error_handler/csrf_failure.html')
1.578125
2
Convert Integer A to Integer B.py
RijuDasgupta9116/LintCode
321
3072
""" Determine the number of bits required to convert integer A to integer B Example Given n = 31, m = 14,return 2 (31)10=(11111)2 (14)10=(01110)2 """ __author__ = 'Danyang' class Solution: def bitSwapRequired(self, a, b): """ :param a: :param b: :return: int """ a = self.to_bin(a) b = self.to_bin(b) diff = len(a)-len(b) ret = 0 if diff<0: a, b = b, a diff *= -1 b = "0"*diff+b for i in xrange(len(b)): if a[i]!=b[i]: ret += 1 return ret def to_bin(self, n): """ 2's complement 32-bit :param n: :return: """ """ :param n: :return: """ a = abs(n) lst = [] while a>0: lst.append(a%2) a /= 2 # 2's complement if n>=0: lst.extend([0]*(32-len(lst))) else: pivot = -1 for i in xrange(len(lst)): if pivot==-1 and lst[i]==1: pivot = i continue if pivot!=-1: lst[i] ^= 1 lst.extend([1]*(32-len(lst))) return "".join(map(str, reversed(lst))) if __name__=="__main__": assert Solution().bitSwapRequired(1, -1)==31 assert Solution().bitSwapRequired(31, 14)==2
2.90625
3
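A simpler equivalent to the solution above, shown as an illustrative sketch (the function name bit_swap_required is made up here): XOR the two integers and count the set bits under a 32-bit mask, which handles negative numbers in two's complement the same way the record's to_bin helper does.

def bit_swap_required(a, b):
    # XOR leaves a 1 exactly where the two numbers differ;
    # the 0xFFFFFFFF mask emulates 32-bit two's complement for negatives.
    return bin((a ^ b) & 0xFFFFFFFF).count("1")

assert bit_swap_required(31, 14) == 2
assert bit_swap_required(1, -1) == 31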
day09/part2.py
mtn/advent16
0
3080
#!/usr/bin/env python3

import re

with open("input.txt") as f:
    content = f.read().strip()


def ulen(content):
    ans = 0
    i = 0
    while i < len(content):
        if content[i] == "(":
            end = content[i:].find(")") + i
            instr = content[i+1:end]
            chars, times = map(int, content[i+1:end].split("x"))
            to_copy = content[end+1:end+1+chars]
            to_copy_len = ulen(to_copy)
            ans += times * to_copy_len
            i = end + 1 + chars
        else:
            ans += 1
            i += 1
    return ans


print(ulen(content))
2.328125
2
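A quick check of the recursive length function above, as an illustrative sketch using small made-up strings; nested markers multiply, so (8x2) applied to (3x3)ABC contributes 2 * 9 = 18 characters.

# `ulen` as defined in the script above.
assert ulen("ABC") == 3                  # no markers: plain length
assert ulen("(3x3)ABC") == 9             # ABC repeated 3 times
assert ulen("X(8x2)(3x3)ABCY") == 20     # X + 2 * ulen("(3x3)ABC") + Y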
Leetcode/Python/_1721.py
Xrenya/algorithms
0
3112
# Definition for singly-linked list. # class ListNode: # def __init__(self, val=0, next=None): # self.val = val # self.next = next class Solution: def swapNodes(self, head: Optional[ListNode], k: int) -> Optional[ListNode]: temp = head array = [] while temp: array.append(temp.val) temp = temp.next array[k - 1], array[len(array) - k] = array[len(array) - k], array[k - 1] head = ListNode(0) dummy = head for num in array: dummy.next = ListNode(num) dummy = dummy.next return head.next # Definition for singly-linked list. # class ListNode: # def __init__(self, val=0, next=None): # self.val = val # self.next = next class Solution: def swapNodes(self, head: Optional[ListNode], k: int) -> Optional[ListNode]: if head is None or head.next is None: return head slow = fast = cnt = head counter = 0 while cnt: counter += 1 cnt = cnt.next for _ in range(k - 1): slow = slow.next for _ in range(counter - k): fast = fast.next slow.val, fast.val = fast.val, slow.val return head
3.015625
3
contacts/forms.py
pedrohd21/Agenda-Django
1
3120
from django import forms

from .models import Contact


class ContactForm(forms.ModelForm):
    class Meta:
        model = Contact
        fields = ('name', 'number', 'email', 'category', 'description')
0.957031
1
tests/utils/test_metrics.py
haochuanwei/hover
251
3136
from hover.utils.metrics import classification_accuracy
import numpy as np


def test_classification_accuracy():
    true = np.array([1, 2, 3, 4, 5, 6, 7, 7])
    pred = np.array([1, 2, 3, 4, 5, 6, 7, 8])
    accl = classification_accuracy(true, pred)
    accr = classification_accuracy(pred, true)
    assert np.allclose(accl, 7/8)
    assert np.allclose(accr, 7/8)
1.609375
2
server/mqtt/handler.py
rishab-rb/MyIOTMap
1
3184
import paho.mqtt.client as mqtt

HOST = 'localhost'
PORT = 1883


class MQTTConnector:
    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.client = mqtt.Client()

    def connect(self):
        # Open the network connection to the broker (60 s keepalive).
        self.client.connect(self.host, self.port, 60)

    def run(self):
        # Block and process network traffic until interrupted.
        self.client.loop_forever()


class MQTTSubscriber:
    def __init__(self, *args, **kwargs):
        super(MQTTSubscriber, self).__init__(*args, **kwargs)


class MQTTPublisher:
    def __init__(self, host):
        self.host = host
1.34375
1
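A minimal usage sketch for the connector above, assuming a broker is reachable at the default host and port; the topic name and callback are illustrative only.

if __name__ == "__main__":
    connector = MQTTConnector(HOST, PORT)
    connector.connect()
    # Subscribe to an example topic before entering the network loop.
    connector.client.subscribe("sensors/#")
    connector.client.on_message = lambda client, userdata, msg: print(msg.topic, msg.payload)
    connector.run()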
basic_and.py
Verkhovskaya/PyDL
5
3232
from pywire import *


def invert(signal):
    if signal:
        return False
    else:
        return True


class Inverter:
    def __init__(self, a, b):
        b.drive(invert, a)


width = 4

a = Signal(width, io="in")
b = Signal(width, io="out")
Inverter(a, b)

build()
1.71875
2
tests/test_dcd_api.py
sadamek/pyIMX
0
3256
# # SPDX-License-Identifier: BSD-3-Clause # The BSD-3-Clause license for this file can be found in the LICENSE file included with this distribution # or at https://spdx.org/licenses/BSD-3-Clause.html#licenseText import os import pytest from imx import img # Used Directories DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data') # Test Files DCD_TXT = os.path.join(DATA_DIR, 'dcd_test.txt') DCD_BIN = os.path.join(DATA_DIR, 'dcd_test.bin') def setup_module(module): # Prepare test environment pass def teardown_module(module): # Clean test environment pass def test_txt_parser(): with open(DCD_TXT, 'r') as f: dcd_obj = img.SegDCD.parse_txt(f.read()) assert dcd_obj is not None assert len(dcd_obj) == 12 def test_bin_parser(): with open(DCD_BIN, 'rb') as f: dcd_obj = img.SegDCD.parse(f.read()) assert dcd_obj is not None assert len(dcd_obj) == 12
1.304688
1
docs/schema_mapping.py
NoAnyLove/pydantic
1
3280
#!/usr/bin/env python3 """ Build a table of Python / Pydantic to JSON Schema mappings. Done like this rather than as a raw rst table to make future edits easier. Please edit this file directly not .tmp_schema_mappings.rst """ table = [ [ 'bool', 'boolean', '', 'JSON Schema Core', '' ], [ 'str', 'string', '', 'JSON Schema Core', '' ], [ 'float', 'number', '', 'JSON Schema Core', '' ], [ 'int', 'integer', '', 'JSON Schema Validation', '' ], [ 'dict', 'object', '', 'JSON Schema Core', '' ], [ 'list', 'array', '', 'JSON Schema Core', '' ], [ 'tuple', 'array', '', 'JSON Schema Core', '' ], [ 'set', 'array', '{"uniqueItems": true}', 'JSON Schema Validation', '' ], [ 'List[str]', 'array', '{"items": {"type": "string"}}', 'JSON Schema Validation', 'And equivalently for any other sub type, e.g. List[int].' ], [ 'Tuple[str, int]', 'array', '{"items": [{"type": "string"}, {"type": "integer"}]}', 'JSON Schema Validation', ( 'And equivalently for any other set of subtypes. Note: If using schemas for OpenAPI, ' 'you shouldn\'t use this declaration, as it would not be valid in OpenAPI (although it is ' 'valid in JSON Schema).' ) ], [ 'Dict[str, int]', 'object', '{"additionalProperties": {"type": "integer"}}', 'JSON Schema Validation', ( 'And equivalently for any other subfields for dicts. Have in mind that although you can use other types as ' 'keys for dicts with Pydantic, only strings are valid keys for JSON, and so, only str is valid as ' 'JSON Schema key types.' ) ], [ 'Union[str, int]', 'anyOf', '{"anyOf": [{"type": "string"}, {"type": "integer"}]}', 'JSON Schema Validation', 'And equivalently for any other subfields for unions.' ], [ 'Enum', 'enum', '{"enum": [...]}', 'JSON Schema Validation', 'All the literal values in the enum are included in the definition.' ], [ 'SecretStr', 'string', '{"writeOnly": true}', 'JSON Schema Validation', '' ], [ 'SecretBytes', 'string', '{"writeOnly": true}', 'JSON Schema Validation', '' ], [ 'EmailStr', 'string', '{"format": "email"}', 'JSON Schema Validation', '' ], [ 'NameEmail', 'string', '{"format": "name-email"}', 'Pydantic standard "format" extension', '' ], [ 'UrlStr', 'string', '{"format": "uri"}', 'JSON Schema Validation', '' ], [ 'DSN', 'string', '{"format": "dsn"}', 'Pydantic standard "format" extension', '' ], [ 'bytes', 'string', '{"format": "binary"}', 'OpenAPI', '' ], [ 'Decimal', 'number', '', 'JSON Schema Core', '' ], [ 'UUID1', 'string', '{"format": "uuid1"}', 'Pydantic standard "format" extension', '' ], [ 'UUID3', 'string', '{"format": "uuid3"}', 'Pydantic standard "format" extension', '' ], [ 'UUID4', 'string', '{"format": "uuid4"}', 'Pydantic standard "format" extension', '' ], [ 'UUID5', 'string', '{"format": "uuid5"}', 'Pydantic standard "format" extension', '' ], [ 'UUID', 'string', '{"format": "uuid"}', 'Pydantic standard "format" extension', 'Suggested in OpenAPI.' 
], [ 'FilePath', 'string', '{"format": "file-path"}', 'Pydantic standard "format" extension', '' ], [ 'DirectoryPath', 'string', '{"format": "directory-path"}', 'Pydantic standard "format" extension', '' ], [ 'Path', 'string', '{"format": "path"}', 'Pydantic standard "format" extension', '' ], [ 'datetime', 'string', '{"format": "date-time"}', 'JSON Schema Validation', '' ], [ 'date', 'string', '{"format": "date"}', 'JSON Schema Validation', '' ], [ 'time', 'string', '{"format": "time"}', 'JSON Schema Validation', '' ], [ 'timedelta', 'number', '{"format": "time-delta"}', 'Difference in seconds (a ``float``), with Pydantic standard "format" extension', 'Suggested in JSON Schema repository\'s issues by maintainer.' ], [ 'Json', 'string', '{"format": "json-string"}', 'Pydantic standard "format" extension', '' ], [ 'IPvAnyAddress', 'string', '{"format": "ipvanyaddress"}', 'Pydantic standard "format" extension', 'IPv4 or IPv6 address as used in ``ipaddress`` module', ], [ 'IPvAnyInterface', 'string', '{"format": "ipvanyinterface"}', 'Pydantic standard "format" extension', 'IPv4 or IPv6 interface as used in ``ipaddress`` module', ], [ 'IPvAnyNetwork', 'string', '{"format": "ipvanynetwork"}', 'Pydantic standard "format" extension', 'IPv4 or IPv6 network as used in ``ipaddress`` module', ], [ 'StrictStr', 'string', '', 'JSON Schema Core', '' ], [ 'ConstrainedStr', 'string', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for ``constr`` below.' ) ], [ 'constr(regex=\'^text$\', min_length=2, max_length=10)', 'string', '{"pattern": "^text$", "minLength": 2, "maxLength": 10}', 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.' ], [ 'ConstrainedInt', 'integer', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for ``conint`` below.' ) ], [ 'conint(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'integer', '{"maximum": 5, "exclusiveMaximum": 6, "minimum": 2, "exclusiveMinimum": 1, "multipleOf": 2}', '', 'Any argument not passed to the function (not defined) will not be included in the schema.' ], [ 'PositiveInt', 'integer', '{"exclusiveMinimum": 0}', 'JSON Schema Validation', '' ], [ 'NegativeInt', 'integer', '{"exclusiveMaximum": 0}', 'JSON Schema Validation', '' ], [ 'ConstrainedFloat', 'number', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations.' 'See the mapping for ``confloat`` below.' ) ], [ 'confloat(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'number', '{"maximum": 5, "exclusiveMaximum": 6, "minimum": 2, "exclusiveMinimum": 1, "multipleOf": 2}', 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.' ], [ 'PositiveFloat', 'number', '{"exclusiveMinimum": 0}', 'JSON Schema Validation', '' ], [ 'NegativeFloat', 'number', '{"exclusiveMaximum": 0}', 'JSON Schema Validation', '' ], [ 'ConstrainedDecimal', 'number', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for ``condecimal`` below.' ) ], [ 'condecimal(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'number', '{"maximum": 5, "exclusiveMaximum": 6, "minimum": 2, "exclusiveMinimum": 1, "multipleOf": 2}', 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.' 
], [ 'BaseModel', 'object', '', 'JSON Schema Core', 'All the properties defined will be defined with standard JSON Schema, including submodels.' ] ] headings = [ 'Python type', 'JSON Schema Type', 'Additional JSON Schema', 'Defined in', 'Notes', ] v = '' col_width = 300 for _ in range(5): v += '+' + '-' * col_width v += '+\n|' for heading in headings: v += f' {heading:{col_width - 2}} |' v += '\n' for _ in range(5): v += '+' + '=' * col_width v += '+' for row in table: v += '\n|' for i, text in enumerate(row): text = f'``{text}``' if i < 3 and text else text v += f' {text:{col_width - 2}} |' v += '\n' for _ in range(5): v += '+' + '-' * col_width v += '+' with open('.tmp_schema_mappings.rst', 'w') as f: f.write(v)
1.234375
1
image_classification/T2T_ViT/load_pytorch_weights.py
RangeKing/PaddleViT
0
3288
# Copyright (c) 2021 PPViT Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """convert pytorch model weights to paddle pdparams""" import os import numpy as np import paddle import torch import timm from config import get_config from t2t_vit import build_t2t_vit as build_model from T2T_ViT_torch.models.t2t_vit import * from T2T_ViT_torch.utils import load_for_transfer_learning def print_model_named_params(model): print('----------------------------------') for name, param in model.named_parameters(): print(name, param.shape) print('----------------------------------') def print_model_named_buffers(model): print('----------------------------------') for name, param in model.named_buffers(): print(name, param.shape) print('----------------------------------') def torch_to_paddle_mapping(model_name, config): # (torch_param_name, paddle_param_name) mapping = [ ('cls_token', 'cls_token'), ('pos_embed', 'pos_embed'), ] for idx in range(1, 3): th_prefix = f'tokens_to_token.attention{idx}' pp_prefix = f'patch_embed.attn{idx}' if '_t_' in model_name: layer_mapping = [ (f'{th_prefix}.attn.qkv', f'{pp_prefix}.attn.qkv'), (f'{th_prefix}.attn.proj', f'{pp_prefix}.attn.proj'), (f'{th_prefix}.norm1', f'{pp_prefix}.norm1'), (f'{th_prefix}.norm2', f'{pp_prefix}.norm2'), (f'{th_prefix}.mlp.fc1', f'{pp_prefix}.mlp.fc1'), (f'{th_prefix}.mlp.fc2', f'{pp_prefix}.mlp.fc2'), ] else: layer_mapping = [ (f'{th_prefix}.w', f'{pp_prefix}.w'), (f'{th_prefix}.kqv', f'{pp_prefix}.kqv'), (f'{th_prefix}.proj', f'{pp_prefix}.proj'), (f'{th_prefix}.norm1', f'{pp_prefix}.norm1'), (f'{th_prefix}.norm2', f'{pp_prefix}.norm2'), (f'{th_prefix}.mlp.0', f'{pp_prefix}.mlp.0'), (f'{th_prefix}.mlp.2', f'{pp_prefix}.mlp.2'), ] mapping.extend(layer_mapping) mapping.append(('tokens_to_token.project','patch_embed.proj')) num_layers = config.MODEL.DEPTH for idx in range(num_layers): th_prefix = f'blocks.{idx}' pp_prefix = f'blocks.{idx}' layer_mapping = [ (f'{th_prefix}.norm1', f'{pp_prefix}.norm1'), (f'{th_prefix}.attn.qkv', f'{pp_prefix}.attn.qkv'), (f'{th_prefix}.attn.proj', f'{pp_prefix}.attn.proj'), (f'{th_prefix}.norm2', f'{pp_prefix}.norm2'), (f'{th_prefix}.mlp.fc1', f'{pp_prefix}.mlp.fc1'), (f'{th_prefix}.mlp.fc2', f'{pp_prefix}.mlp.fc2'), ] mapping.extend(layer_mapping) head_mapping = [ ('norm', 'norm'), ('head', 'head'), ] mapping.extend(head_mapping) return mapping def convert(torch_model, paddle_model, model_name, config): def _set_value(th_name, pd_name, transpose=True): th_shape = th_params[th_name].shape pd_shape = tuple(pd_params[pd_name].shape) # paddle shape default type is list #assert th_shape == pd_shape, f'{th_shape} != {pd_shape}' print(f'**SET** {th_name} {th_shape} **TO** {pd_name} {pd_shape}') if isinstance(th_params[th_name], torch.nn.parameter.Parameter): value = th_params[th_name].data.numpy() else: value = th_params[th_name].numpy() if len(value.shape) == 2 and transpose: value = value.transpose((1, 0)) pd_params[pd_name].set_value(value) # 1. 
get paddle and torch model parameters pd_params = {} th_params = {} for name, param in paddle_model.named_parameters(): pd_params[name] = param for name, param in torch_model.named_parameters(): th_params[name] = param for name, param in paddle_model.named_buffers(): pd_params[name] = param for name, param in torch_model.named_buffers(): th_params[name] = param # 2. get name mapping pairs mapping = torch_to_paddle_mapping(model_name, config) missing_keys_th = [] missing_keys_pd = [] zip_map = list(zip(*mapping)) th_keys = list(zip_map[0]) pd_keys = list(zip_map[1]) for key in th_params: missing = False if key not in th_keys: missing = True if key.endswith('.weight'): if key[:-7] in th_keys: missing = False if key.endswith('.bias'): if key[:-5] in th_keys: missing = False if missing: missing_keys_th.append(key) for key in pd_params: missing = False if key not in pd_keys: missing = True if key.endswith('.weight'): if key[:-7] in pd_keys: missing = False if key.endswith('.bias'): if key[:-5] in pd_keys: missing = False if missing: missing_keys_pd.append(key) print('====================================') print('missing_keys_pytorch:') print(missing_keys_th) print('missing_keys_paddle:') print(missing_keys_pd) print('====================================') # 3. set torch param values to paddle params: may needs transpose on weights for th_name, pd_name in mapping: if th_name in th_params and pd_name in pd_params: # nn.Parameters if th_name.endswith('w'): _set_value(th_name, pd_name, transpose=False) else: _set_value(th_name, pd_name) else: if f'{th_name}.weight' in th_params and f'{pd_name}.weight' in pd_params: th_name_w = f'{th_name}.weight' pd_name_w = f'{pd_name}.weight' _set_value(th_name_w, pd_name_w) if f'{th_name}.bias' in th_params and f'{pd_name}.bias' in pd_params: th_name_b = f'{th_name}.bias' pd_name_b = f'{pd_name}.bias' _set_value(th_name_b, pd_name_b) if f'{th_name}.running_mean' in th_params and f'{pd_name}._mean' in pd_params: th_name_b = f'{th_name}.running_mean' pd_name_b = f'{pd_name}._mean' _set_value(th_name_b, pd_name_b) if f'{th_name}.running_var' in th_params and f'{pd_name}._variance' in pd_params: th_name_b = f'{th_name}.running_var' pd_name_b = f'{pd_name}._variance' _set_value(th_name_b, pd_name_b) return paddle_model def main(): paddle.set_device('cpu') model_name_list = ['t2t_vit_7', 't2t_vit_10', 't2t_vit_12', 't2t_vit_14', 't2t_vit_14_384', 't2t_vit_19', 't2t_vit_24', 't2t_vit_24_token_labeling', 't2t_vit_t_14', 't2t_vit_t_19', 't2t_vit_t_24'] pth_model_path_list = ['./T2T_ViT_torch/t2t-vit-pth-models/71.7_T2T_ViT_7.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/75.2_T2T_ViT_10.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/76.5_T2T_ViT_12.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/81.5_T2T_ViT_14.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/83.3_T2T_ViT_14.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/81.9_T2T_ViT_19.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/82.3_T2T_ViT_24.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/84.2_T2T_ViT_24.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/81.7_T2T_ViTt_14.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/82.4_T2T_ViTt_19.pth.tar', './T2T_ViT_torch/t2t-vit-pth-models/82.6_T2T_ViTt_24.pth.tar'] for model_name, pth_model_path in zip(model_name_list, pth_model_path_list): print(f'============= NOW: {model_name} =============') sz = 384 if '384' in model_name else 224 if 'token_labeling' in model_name: config = get_config(f'./configs/{model_name[:-15]}.yaml') else: config = get_config(f'./configs/{model_name}.yaml') 
paddle_model = build_model(config) paddle_model.eval() print_model_named_params(paddle_model) print_model_named_buffers(paddle_model) print('+++++++++++++++++++++++++++++++++++') device = torch.device('cpu') if 'token_labeling' in model_name: torch_model = eval(f'{model_name[:-15]}(img_size={sz})') else: if '384' in model_name: torch_model = eval(f'{model_name[:-4]}(img_size={sz})') else: torch_model = eval(f'{model_name}(img_size={sz})') load_for_transfer_learning(torch_model, pth_model_path, use_ema=True, strict=False, num_classes=1000) torch_model = torch_model.to(device) torch_model.eval() print_model_named_params(torch_model) print_model_named_buffers(torch_model) # convert weights paddle_model = convert(torch_model, paddle_model, model_name, config) # check correctness x = np.random.randn(2, 3, sz, sz).astype('float32') x_paddle = paddle.to_tensor(x) x_torch = torch.Tensor(x).to(device) out_torch = torch_model(x_torch) out_paddle = paddle_model(x_paddle) out_torch = out_torch.data.cpu().numpy() out_paddle = out_paddle.cpu().numpy() print(out_torch.shape, out_paddle.shape) print(out_torch[0, 0:100]) print('========================================================') print(out_paddle[0, 0:100]) assert np.allclose(out_torch, out_paddle, atol = 1e-2) # save weights for paddle model model_path = os.path.join(f'./{model_name}.pdparams') paddle.save(paddle_model.state_dict(), model_path) print(f'{model_name} done') print('all done') if __name__ == "__main__": main()
1.648438
2
Algorithmic Toolbox/Greedy Algorithms/Maximum Advertisement Revenue/maximum_ad_revenue.py
ganeshbhandarkar/Python-Projects
9
3312
# python3

from itertools import permutations


def max_dot_product_naive(first_sequence, second_sequence):
    assert len(first_sequence) == len(second_sequence)
    assert len(first_sequence) <= 10 ** 3
    assert all(0 <= f <= 10 ** 5 for f in first_sequence)
    assert all(0 <= s <= 10 ** 5 for s in second_sequence)

    max_product = 0
    for permutation in permutations(second_sequence):
        dot_product = sum(first_sequence[i] * permutation[i] for i in range(len(first_sequence)))
        max_product = max(max_product, dot_product)
    return max_product


def max_dot_product(first_sequence, second_sequence):
    assert len(first_sequence) == len(second_sequence)
    assert len(first_sequence) <= 10 ** 3
    assert all(0 <= f <= 10 ** 5 for f in first_sequence)
    assert all(0 <= s <= 10 ** 5 for s in second_sequence)

    # Greedy: pair the largest price with the largest click count.
    return sum(f * s for f, s in zip(sorted(first_sequence), sorted(second_sequence)))


if __name__ == '__main__':
    n = int(input())
    prices = list(map(int, input().split()))
    clicks = list(map(int, input().split()))
    assert len(prices) == len(clicks) == n
    print(max_dot_product(prices, clicks))
2.484375
2
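The greedy step above relies on the rearrangement inequality: pairing the i-th largest price with the i-th largest click count maximizes the dot product. A small illustrative check with made-up numbers, using the two functions defined in the script above:

prices = [23, 39, 92]
clicks = [30, 1, 84]
# Best pairing: 92*84 + 39*30 + 23*1 = 7728 + 1170 + 23 = 8921
assert max_dot_product(prices, clicks) == 8921
assert max_dot_product_naive(prices, clicks) == 8921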
obswebsocket/requests.py
PanBartosz/obs-websocket-py
123
3320
#!/usr/bin/env python # -*- coding: utf-8 -*- # THIS FILE WAS GENERATED BY generate_classes.py - DO NOT EDIT # # (Generated on 2020-12-20 18:26:33.661372) # from .base_classes import Baserequests class GetVersion(Baserequests): """Returns the latest version of the plugin and the API. :Returns: *version* type: double OBSRemote compatible API version. Fixed to 1.1 for retrocompatibility. *obs_websocket_version* type: String obs-websocket plugin version. *obs_studio_version* type: String OBS Studio program version. *available_requests* type: String List of available request types, formatted as a comma-separated list string (e.g. : "Method1,Method2,Method3"). *supported_image_export_formats* type: String List of supported formats for features that use image export (like the TakeSourceScreenshot request type) formatted as a comma-separated list string """ def __init__(self): Baserequests.__init__(self) self.name = 'GetVersion' self.datain['version'] = None self.datain['obs-websocket-version'] = None self.datain['obs-studio-version'] = None self.datain['available-requests'] = None self.datain['supported-image-export-formats'] = None def getVersion(self): return self.datain['version'] def getObsWebsocketVersion(self): return self.datain['obs-websocket-version'] def getObsStudioVersion(self): return self.datain['obs-studio-version'] def getAvailableRequests(self): return self.datain['available-requests'] def getSupportedImageExportFormats(self): return self.datain['supported-image-export-formats'] class GetAuthRequired(Baserequests): """Tells the client if authentication is required. If so, returns authentication parameters `challenge` and `salt` (see "Authentication" for more information). :Returns: *authRequired* type: boolean Indicates whether authentication is required. *challenge* type: String (optional) *salt* type: String (optional) """ def __init__(self): Baserequests.__init__(self) self.name = 'GetAuthRequired' self.datain['authRequired'] = None self.datain['challenge'] = None self.datain['salt'] = None def getAuthRequired(self): return self.datain['authRequired'] def getChallenge(self): return self.datain['challenge'] def getSalt(self): return self.datain['salt'] class Authenticate(Baserequests): """Attempt to authenticate the client to the server. :Arguments: *auth* type: String Response to the auth challenge (see "Authentication" for more information). """ def __init__(self, auth): Baserequests.__init__(self) self.name = 'Authenticate' self.dataout['auth'] = auth class SetHeartbeat(Baserequests): """Enable/disable sending of the Heartbeat event :Arguments: *enable* type: boolean Starts/Stops emitting heartbeat messages """ def __init__(self, enable): Baserequests.__init__(self) self.name = 'SetHeartbeat' self.dataout['enable'] = enable class SetFilenameFormatting(Baserequests): """Set the filename formatting string :Arguments: *filename_formatting* type: String Filename formatting string to set. """ def __init__(self, filename_formatting): Baserequests.__init__(self) self.name = 'SetFilenameFormatting' self.dataout['filename-formatting'] = filename_formatting class GetFilenameFormatting(Baserequests): """Get the filename formatting string :Returns: *filename_formatting* type: String Current filename formatting string. 
""" def __init__(self): Baserequests.__init__(self) self.name = 'GetFilenameFormatting' self.datain['filename-formatting'] = None def getFilenameFormatting(self): return self.datain['filename-formatting'] class GetStats(Baserequests): """Get OBS stats (almost the same info as provided in OBS' stats window) :Returns: *stats* type: OBSStats [OBS stats](#obsstats) """ def __init__(self): Baserequests.__init__(self) self.name = 'GetStats' self.datain['stats'] = None def getStats(self): return self.datain['stats'] class BroadcastCustomMessage(Baserequests): """Broadcast custom message to all connected WebSocket clients :Arguments: *realm* type: String Identifier to be choosen by the client *data* type: Object User-defined data """ def __init__(self, realm, data): Baserequests.__init__(self) self.name = 'BroadcastCustomMessage' self.dataout['realm'] = realm self.dataout['data'] = data class GetVideoInfo(Baserequests): """Get basic OBS video information :Returns: *baseWidth* type: int Base (canvas) width *baseHeight* type: int Base (canvas) height *outputWidth* type: int Output width *outputHeight* type: int Output height *scaleType* type: String Scaling method used if output size differs from base size *fps* type: double Frames rendered per second *videoFormat* type: String Video color format *colorSpace* type: String Color space for YUV *colorRange* type: String Color range (full or partial) """ def __init__(self): Baserequests.__init__(self) self.name = 'GetVideoInfo' self.datain['baseWidth'] = None self.datain['baseHeight'] = None self.datain['outputWidth'] = None self.datain['outputHeight'] = None self.datain['scaleType'] = None self.datain['fps'] = None self.datain['videoFormat'] = None self.datain['colorSpace'] = None self.datain['colorRange'] = None def getBaseWidth(self): return self.datain['baseWidth'] def getBaseHeight(self): return self.datain['baseHeight'] def getOutputWidth(self): return self.datain['outputWidth'] def getOutputHeight(self): return self.datain['outputHeight'] def getScaleType(self): return self.datain['scaleType'] def getFps(self): return self.datain['fps'] def getVideoFormat(self): return self.datain['videoFormat'] def getColorSpace(self): return self.datain['colorSpace'] def getColorRange(self): return self.datain['colorRange'] class OpenProjector(Baserequests): """Open a projector window or create a projector on a monitor. Requires OBS v24.0.4 or newer. :Arguments: *type* type: String (Optional) Type of projector: `Preview` (default), `Source`, `Scene`, `StudioProgram`, or `Multiview` (case insensitive). *monitor* type: int (Optional) Monitor to open the projector on. If -1 or omitted, opens a window. *geometry* type: String (Optional) Size and position of the projector window (only if monitor is -1). Encoded in Base64 using [Qt's geometry encoding](https://doc.qt.io/qt-5/qwidget.html#saveGeometry). Corresponds to OBS's saved projectors. *name* type: String (Optional) Name of the source or scene to be displayed (ignored for other projector types). """ def __init__(self, type, monitor, geometry, name): Baserequests.__init__(self) self.name = 'OpenProjector' self.dataout['type'] = type self.dataout['monitor'] = monitor self.dataout['geometry'] = geometry self.dataout['name'] = name class TriggerHotkeyByName(Baserequests): """Executes hotkey routine, identified by hotkey unique name :Arguments: *hotkeyName* type: String Unique name of the hotkey, as defined when registering the hotkey (e.g. 
"ReplayBuffer.Save") """ def __init__(self, hotkeyName): Baserequests.__init__(self) self.name = 'TriggerHotkeyByName' self.dataout['hotkeyName'] = hotkeyName class TriggerHotkeyBySequence(Baserequests): """Executes hotkey routine, identified by bound combination of keys. A single key combination might trigger multiple hotkey routines depending on user settings :Arguments: *keyId* type: String Main key identifier (e.g. `OBS_KEY_A` for key "A"). Available identifiers [here](https://github.com/obsproject/obs-studio/blob/master/libobs/obs-hotkeys.h) *keyModifiers* type: Object (Optional) Optional key modifiers object. False entries can be ommitted *keyModifiers.shift* type: boolean Trigger Shift Key *keyModifiers.alt* type: boolean Trigger Alt Key *keyModifiers.control* type: boolean Trigger Control (Ctrl) Key *keyModifiers.command* type: boolean Trigger Command Key (Mac) """ def __init__(self, keyId, keyModifiers): Baserequests.__init__(self) self.name = 'TriggerHotkeyBySequence' self.dataout['keyId'] = keyId self.dataout['keyModifiers'] = keyModifiers class PlayPauseMedia(Baserequests): """Pause or play a media source. Supports ffmpeg and vlc media sources (as of OBS v25.0.8) :Arguments: *sourceName* type: String Source name. *playPause* type: boolean Whether to pause or play the source. `false` for play, `true` for pause. """ def __init__(self, sourceName, playPause): Baserequests.__init__(self) self.name = 'PlayPauseMedia' self.dataout['sourceName'] = sourceName self.dataout['playPause'] = playPause class RestartMedia(Baserequests): """Restart a media source. Supports ffmpeg and vlc media sources (as of OBS v25.0.8) :Arguments: *sourceName* type: String Source name. """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'RestartMedia' self.dataout['sourceName'] = sourceName class StopMedia(Baserequests): """Stop a media source. Supports ffmpeg and vlc media sources (as of OBS v25.0.8) :Arguments: *sourceName* type: String Source name. """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'StopMedia' self.dataout['sourceName'] = sourceName class NextMedia(Baserequests): """Skip to the next media item in the playlist. Supports only vlc media source (as of OBS v25.0.8) :Arguments: *sourceName* type: String Source name. """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'NextMedia' self.dataout['sourceName'] = sourceName class PreviousMedia(Baserequests): """Go to the previous media item in the playlist. Supports only vlc media source (as of OBS v25.0.8) :Arguments: *sourceName* type: String Source name. """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'PreviousMedia' self.dataout['sourceName'] = sourceName class GetMediaDuration(Baserequests): """Get the length of media in milliseconds. Supports ffmpeg and vlc media sources (as of OBS v25.0.8) Note: For some reason, for the first 5 or so seconds that the media is playing, the total duration can be off by upwards of 50ms. :Arguments: *sourceName* type: String Source name. :Returns: *mediaDuration* type: int The total length of media in milliseconds.. """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'GetMediaDuration' self.datain['mediaDuration'] = None self.dataout['sourceName'] = sourceName def getMediaDuration(self): return self.datain['mediaDuration'] class GetMediaTime(Baserequests): """Get the current timestamp of media in milliseconds. 
Supports ffmpeg and vlc media sources (as of OBS v25.0.8) :Arguments: *sourceName* type: String Source name. :Returns: *timestamp* type: int The time in milliseconds since the start of the media. """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'GetMediaTime' self.datain['timestamp'] = None self.dataout['sourceName'] = sourceName def getTimestamp(self): return self.datain['timestamp'] class SetMediaTime(Baserequests): """Set the timestamp of a media source. Supports ffmpeg and vlc media sources (as of OBS v25.0.8) :Arguments: *sourceName* type: String Source name. *timestamp* type: int Milliseconds to set the timestamp to. """ def __init__(self, sourceName, timestamp): Baserequests.__init__(self) self.name = 'SetMediaTime' self.dataout['sourceName'] = sourceName self.dataout['timestamp'] = timestamp class ScrubMedia(Baserequests): """Scrub media using a supplied offset. Supports ffmpeg and vlc media sources (as of OBS v25.0.8) Note: Due to processing/network delays, this request is not perfect. The processing rate of this request has also not been tested. :Arguments: *sourceName* type: String Source name. *timeOffset* type: int Millisecond offset (positive or negative) to offset the current media position. """ def __init__(self, sourceName, timeOffset): Baserequests.__init__(self) self.name = 'ScrubMedia' self.dataout['sourceName'] = sourceName self.dataout['timeOffset'] = timeOffset class GetMediaState(Baserequests): """Get the current playing state of a media source. Supports ffmpeg and vlc media sources (as of OBS v25.0.8) :Arguments: *sourceName* type: String Source name. :Returns: *mediaState* type: String The media state of the provided source. States: `none`, `playing`, `opening`, `buffering`, `paused`, `stopped`, `ended`, `error`, `unknown` """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'GetMediaState' self.datain['mediaState'] = None self.dataout['sourceName'] = sourceName def getMediaState(self): return self.datain['mediaState'] class GetMediaSourcesList(Baserequests): """List the media state of all media sources (vlc and media source) :Returns: *mediaSources* type: Array<Object> Array of sources *mediaSources.*.sourceName* type: String Unique source name *mediaSources.*.sourceKind* type: String Unique source internal type (a.k.a `ffmpeg_source` or `vlc_source`) *mediaSources.*.mediaState* type: String The current state of media for that source. States: `none`, `playing`, `opening`, `buffering`, `paused`, `stopped`, `ended`, `error`, `unknown` """ def __init__(self): Baserequests.__init__(self) self.name = 'GetMediaSourcesList' self.datain['mediaSources'] = None def getMediaSources(self): return self.datain['mediaSources'] class CreateSource(Baserequests): """Create a source and add it as a sceneitem to a scene. :Arguments: *sourceName* type: String Source name. *sourceKind* type: String Source kind, Eg. `vlc_source`. *sceneName* type: String Scene to add the new source to. *sourceSettings* type: Object (optional) Source settings data. *setVisible* type: boolean (optional) Set the created SceneItem as visible or not. Defaults to true :Returns: *itemId* type: int ID of the SceneItem in the scene. 
""" def __init__(self, sourceName, sourceKind, sceneName, sourceSettings=None, setVisible=None): Baserequests.__init__(self) self.name = 'CreateSource' self.datain['itemId'] = None self.dataout['sourceName'] = sourceName self.dataout['sourceKind'] = sourceKind self.dataout['sceneName'] = sceneName self.dataout['sourceSettings'] = sourceSettings self.dataout['setVisible'] = setVisible def getItemId(self): return self.datain['itemId'] class GetSourcesList(Baserequests): """List all sources available in the running OBS instance :Returns: *sources* type: Array<Object> Array of sources *sources.*.name* type: String Unique source name *sources.*.typeId* type: String Non-unique source internal type (a.k.a kind) *sources.*.type* type: String Source type. Value is one of the following: "input", "filter", "transition", "scene" or "unknown" """ def __init__(self): Baserequests.__init__(self) self.name = 'GetSourcesList' self.datain['sources'] = None def getSources(self): return self.datain['sources'] class GetSourceTypesList(Baserequests): """Get a list of all available sources types :Returns: *types* type: Array<Object> Array of source types *types.*.typeId* type: String Non-unique internal source type ID *types.*.displayName* type: String Display name of the source type *types.*.type* type: String Type. Value is one of the following: "input", "filter", "transition" or "other" *types.*.defaultSettings* type: Object Default settings of this source type *types.*.caps* type: Object Source type capabilities *types.*.caps.isAsync* type: Boolean True if source of this type provide frames asynchronously *types.*.caps.hasVideo* type: Boolean True if sources of this type provide video *types.*.caps.hasAudio* type: Boolean True if sources of this type provide audio *types.*.caps.canInteract* type: Boolean True if interaction with this sources of this type is possible *types.*.caps.isComposite* type: Boolean True if sources of this type composite one or more sub-sources *types.*.caps.doNotDuplicate* type: Boolean True if sources of this type should not be fully duplicated *types.*.caps.doNotSelfMonitor* type: Boolean True if sources of this type may cause a feedback loop if it's audio is monitored and shouldn't be """ def __init__(self): Baserequests.__init__(self) self.name = 'GetSourceTypesList' self.datain['types'] = None def getTypes(self): return self.datain['types'] class GetVolume(Baserequests): """Get the volume of the specified source. Default response uses mul format, NOT SLIDER PERCENTAGE. :Arguments: *source* type: String Source name. *useDecibel* type: boolean (optional) Output volume in decibels of attenuation instead of amplitude/mul. :Returns: *name* type: String Source name. *volume* type: double Volume of the source. Between `0.0` and `20.0` if using mul, under `26.0` if using dB. *muted* type: boolean Indicates whether the source is muted. """ def __init__(self, source, useDecibel=None): Baserequests.__init__(self) self.name = 'GetVolume' self.datain['name'] = None self.datain['volume'] = None self.datain['muted'] = None self.dataout['source'] = source self.dataout['useDecibel'] = useDecibel def getName(self): return self.datain['name'] def getVolume(self): return self.datain['volume'] def getMuted(self): return self.datain['muted'] class SetVolume(Baserequests): """Set the volume of the specified source. Default request format uses mul, NOT SLIDER PERCENTAGE. :Arguments: *source* type: String Source name. *volume* type: double Desired volume. 
Must be between `0.0` and `20.0` for mul, and under 26.0 for dB. OBS will interpret dB values under -100.0 as Inf. Note: The OBS volume sliders only reach a maximum of 1.0mul/0.0dB, however OBS actually supports larger values. *useDecibel* type: boolean (optional) Interperet `volume` data as decibels instead of amplitude/mul. """ def __init__(self, source, volume, useDecibel=None): Baserequests.__init__(self) self.name = 'SetVolume' self.dataout['source'] = source self.dataout['volume'] = volume self.dataout['useDecibel'] = useDecibel class GetMute(Baserequests): """Get the mute status of a specified source. :Arguments: *source* type: String Source name. :Returns: *name* type: String Source name. *muted* type: boolean Mute status of the source. """ def __init__(self, source): Baserequests.__init__(self) self.name = 'GetMute' self.datain['name'] = None self.datain['muted'] = None self.dataout['source'] = source def getName(self): return self.datain['name'] def getMuted(self): return self.datain['muted'] class SetMute(Baserequests): """Sets the mute status of a specified source. :Arguments: *source* type: String Source name. *mute* type: boolean Desired mute status. """ def __init__(self, source, mute): Baserequests.__init__(self) self.name = 'SetMute' self.dataout['source'] = source self.dataout['mute'] = mute class ToggleMute(Baserequests): """Inverts the mute status of a specified source. :Arguments: *source* type: String Source name. """ def __init__(self, source): Baserequests.__init__(self) self.name = 'ToggleMute' self.dataout['source'] = source class GetAudioActive(Baserequests): """Get the audio's active status of a specified source. :Arguments: *sourceName* type: String Source name. :Returns: *audioActive* type: boolean Audio active status of the source. """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'GetAudioActive' self.datain['audioActive'] = None self.dataout['sourceName'] = sourceName def getAudioActive(self): return self.datain['audioActive'] class SetSourceName(Baserequests): """ Note: If the new name already exists as a source, obs-websocket will return an error. :Arguments: *sourceName* type: String Source name. *newName* type: String New source name. """ def __init__(self, sourceName, newName): Baserequests.__init__(self) self.name = 'SetSourceName' self.dataout['sourceName'] = sourceName self.dataout['newName'] = newName class SetSyncOffset(Baserequests): """Set the audio sync offset of a specified source. :Arguments: *source* type: String Source name. *offset* type: int The desired audio sync offset (in nanoseconds). """ def __init__(self, source, offset): Baserequests.__init__(self) self.name = 'SetSyncOffset' self.dataout['source'] = source self.dataout['offset'] = offset class GetSyncOffset(Baserequests): """Get the audio sync offset of a specified source. :Arguments: *source* type: String Source name. :Returns: *name* type: String Source name. *offset* type: int The audio sync offset (in nanoseconds). """ def __init__(self, source): Baserequests.__init__(self) self.name = 'GetSyncOffset' self.datain['name'] = None self.datain['offset'] = None self.dataout['source'] = source def getName(self): return self.datain['name'] def getOffset(self): return self.datain['offset'] class GetSourceSettings(Baserequests): """Get settings of the specified source :Arguments: *sourceName* type: String Source name. *sourceType* type: String (optional) Type of the specified source. Useful for type-checking if you expect a specific settings schema. 
:Returns: *sourceName* type: String Source name *sourceType* type: String Type of the specified source *sourceSettings* type: Object Source settings (varies between source types, may require some probing around). """ def __init__(self, sourceName, sourceType=None): Baserequests.__init__(self) self.name = 'GetSourceSettings' self.datain['sourceName'] = None self.datain['sourceType'] = None self.datain['sourceSettings'] = None self.dataout['sourceName'] = sourceName self.dataout['sourceType'] = sourceType def getSourceName(self): return self.datain['sourceName'] def getSourceType(self): return self.datain['sourceType'] def getSourceSettings(self): return self.datain['sourceSettings'] class SetSourceSettings(Baserequests): """Set settings of the specified source. :Arguments: *sourceName* type: String Source name. *sourceType* type: String (optional) Type of the specified source. Useful for type-checking to avoid settings a set of settings incompatible with the actual source's type. *sourceSettings* type: Object Source settings (varies between source types, may require some probing around). :Returns: *sourceName* type: String Source name *sourceType* type: String Type of the specified source *sourceSettings* type: Object Updated source settings """ def __init__(self, sourceName, sourceSettings, sourceType=None): Baserequests.__init__(self) self.name = 'SetSourceSettings' self.datain['sourceName'] = None self.datain['sourceType'] = None self.datain['sourceSettings'] = None self.dataout['sourceName'] = sourceName self.dataout['sourceSettings'] = sourceSettings self.dataout['sourceType'] = sourceType def getSourceName(self): return self.datain['sourceName'] def getSourceType(self): return self.datain['sourceType'] def getSourceSettings(self): return self.datain['sourceSettings'] class GetTextGDIPlusProperties(Baserequests): """Get the current properties of a Text GDI Plus source. :Arguments: *source* type: String Source name. :Returns: *source* type: String Source name. *align* type: String Text Alignment ("left", "center", "right"). *bk_color* type: int Background color. *bk_opacity* type: int Background opacity (0-100). *chatlog* type: boolean Chat log. *chatlog_lines* type: int Chat log lines. *color* type: int Text color. *extents* type: boolean Extents wrap. *extents_cx* type: int Extents cx. *extents_cy* type: int Extents cy. *file* type: String File path name. *read_from_file* type: boolean Read text from the specified file. *font* type: Object Holds data for the font. Ex: `"font": { "face": "Arial", "flags": 0, "size": 150, "style": "" }` *font.face* type: String Font face. *font.flags* type: int Font text styling flag. `Bold=1, Italic=2, Bold Italic=3, Underline=5, Strikeout=8` *font.size* type: int Font text size. *font.style* type: String Font Style (unknown function). *gradient* type: boolean Gradient enabled. *gradient_color* type: int Gradient color. *gradient_dir* type: float Gradient direction. *gradient_opacity* type: int Gradient opacity (0-100). *outline* type: boolean Outline. *outline_color* type: int Outline color. *outline_size* type: int Outline size. *outline_opacity* type: int Outline opacity (0-100). *text* type: String Text content to be displayed. *valign* type: String Text vertical alignment ("top", "center", "bottom"). *vertical* type: boolean Vertical text enabled. 
""" def __init__(self, source): Baserequests.__init__(self) self.name = 'GetTextGDIPlusProperties' self.datain['source'] = None self.datain['align'] = None self.datain['bk_color'] = None self.datain['bk_opacity'] = None self.datain['chatlog'] = None self.datain['chatlog_lines'] = None self.datain['color'] = None self.datain['extents'] = None self.datain['extents_cx'] = None self.datain['extents_cy'] = None self.datain['file'] = None self.datain['read_from_file'] = None self.datain['font'] = None self.datain['gradient'] = None self.datain['gradient_color'] = None self.datain['gradient_dir'] = None self.datain['gradient_opacity'] = None self.datain['outline'] = None self.datain['outline_color'] = None self.datain['outline_size'] = None self.datain['outline_opacity'] = None self.datain['text'] = None self.datain['valign'] = None self.datain['vertical'] = None self.dataout['source'] = source def getSource(self): return self.datain['source'] def getAlign(self): return self.datain['align'] def getBk_color(self): return self.datain['bk_color'] def getBk_opacity(self): return self.datain['bk_opacity'] def getChatlog(self): return self.datain['chatlog'] def getChatlog_lines(self): return self.datain['chatlog_lines'] def getColor(self): return self.datain['color'] def getExtents(self): return self.datain['extents'] def getExtents_cx(self): return self.datain['extents_cx'] def getExtents_cy(self): return self.datain['extents_cy'] def getFile(self): return self.datain['file'] def getRead_from_file(self): return self.datain['read_from_file'] def getFont(self): return self.datain['font'] def getGradient(self): return self.datain['gradient'] def getGradient_color(self): return self.datain['gradient_color'] def getGradient_dir(self): return self.datain['gradient_dir'] def getGradient_opacity(self): return self.datain['gradient_opacity'] def getOutline(self): return self.datain['outline'] def getOutline_color(self): return self.datain['outline_color'] def getOutline_size(self): return self.datain['outline_size'] def getOutline_opacity(self): return self.datain['outline_opacity'] def getText(self): return self.datain['text'] def getValign(self): return self.datain['valign'] def getVertical(self): return self.datain['vertical'] class SetTextGDIPlusProperties(Baserequests): """Set the current properties of a Text GDI Plus source. :Arguments: *source* type: String Name of the source. *align* type: String (optional) Text Alignment ("left", "center", "right"). *bk_color* type: int (optional) Background color. *bk_opacity* type: int (optional) Background opacity (0-100). *chatlog* type: boolean (optional) Chat log. *chatlog_lines* type: int (optional) Chat log lines. *color* type: int (optional) Text color. *extents* type: boolean (optional) Extents wrap. *extents_cx* type: int (optional) Extents cx. *extents_cy* type: int (optional) Extents cy. *file* type: String (optional) File path name. *read_from_file* type: boolean (optional) Read text from the specified file. *font* type: Object (optional) Holds data for the font. Ex: `"font": { "face": "Arial", "flags": 0, "size": 150, "style": "" }` *font.face* type: String (optional) Font face. *font.flags* type: int (optional) Font text styling flag. `Bold=1, Italic=2, Bold Italic=3, Underline=5, Strikeout=8` *font.size* type: int (optional) Font text size. *font.style* type: String (optional) Font Style (unknown function). *gradient* type: boolean (optional) Gradient enabled. *gradient_color* type: int (optional) Gradient color. 
*gradient_dir* type: float (optional) Gradient direction. *gradient_opacity* type: int (optional) Gradient opacity (0-100). *outline* type: boolean (optional) Outline. *outline_color* type: int (optional) Outline color. *outline_size* type: int (optional) Outline size. *outline_opacity* type: int (optional) Outline opacity (0-100). *text* type: String (optional) Text content to be displayed. *valign* type: String (optional) Text vertical alignment ("top", "center", "bottom"). *vertical* type: boolean (optional) Vertical text enabled. *render* type: boolean (optional) Visibility of the scene item. """ def __init__(self, source, align=None, bk_color=None, bk_opacity=None, chatlog=None, chatlog_lines=None, color=None, extents=None, extents_cx=None, extents_cy=None, file=None, read_from_file=None, font=None, gradient=None, gradient_color=None, gradient_dir=None, gradient_opacity=None, outline=None, outline_color=None, outline_size=None, outline_opacity=None, text=None, valign=None, vertical=None, render=None): Baserequests.__init__(self) self.name = 'SetTextGDIPlusProperties' self.dataout['source'] = source self.dataout['align'] = align self.dataout['bk_color'] = bk_color self.dataout['bk_opacity'] = bk_opacity self.dataout['chatlog'] = chatlog self.dataout['chatlog_lines'] = chatlog_lines self.dataout['color'] = color self.dataout['extents'] = extents self.dataout['extents_cx'] = extents_cx self.dataout['extents_cy'] = extents_cy self.dataout['file'] = file self.dataout['read_from_file'] = read_from_file self.dataout['font'] = font self.dataout['gradient'] = gradient self.dataout['gradient_color'] = gradient_color self.dataout['gradient_dir'] = gradient_dir self.dataout['gradient_opacity'] = gradient_opacity self.dataout['outline'] = outline self.dataout['outline_color'] = outline_color self.dataout['outline_size'] = outline_size self.dataout['outline_opacity'] = outline_opacity self.dataout['text'] = text self.dataout['valign'] = valign self.dataout['vertical'] = vertical self.dataout['render'] = render class GetTextFreetype2Properties(Baserequests): """Get the current properties of a Text Freetype 2 source. :Arguments: *source* type: String Source name. :Returns: *source* type: String Source name *color1* type: int Gradient top color. *color2* type: int Gradient bottom color. *custom_width* type: int Custom width (0 to disable). *drop_shadow* type: boolean Drop shadow. *font* type: Object Holds data for the font. Ex: `"font": { "face": "Arial", "flags": 0, "size": 150, "style": "" }` *font.face* type: String Font face. *font.flags* type: int Font text styling flag. `Bold=1, Italic=2, Bold Italic=3, Underline=5, Strikeout=8` *font.size* type: int Font text size. *font.style* type: String Font Style (unknown function). *from_file* type: boolean Read text from the specified file. *log_mode* type: boolean Chat log. *outline* type: boolean Outline. *text* type: String Text content to be displayed. *text_file* type: String File path. *word_wrap* type: boolean Word wrap. 
""" def __init__(self, source): Baserequests.__init__(self) self.name = 'GetTextFreetype2Properties' self.datain['source'] = None self.datain['color1'] = None self.datain['color2'] = None self.datain['custom_width'] = None self.datain['drop_shadow'] = None self.datain['font'] = None self.datain['from_file'] = None self.datain['log_mode'] = None self.datain['outline'] = None self.datain['text'] = None self.datain['text_file'] = None self.datain['word_wrap'] = None self.dataout['source'] = source def getSource(self): return self.datain['source'] def getColor1(self): return self.datain['color1'] def getColor2(self): return self.datain['color2'] def getCustom_width(self): return self.datain['custom_width'] def getDrop_shadow(self): return self.datain['drop_shadow'] def getFont(self): return self.datain['font'] def getFrom_file(self): return self.datain['from_file'] def getLog_mode(self): return self.datain['log_mode'] def getOutline(self): return self.datain['outline'] def getText(self): return self.datain['text'] def getText_file(self): return self.datain['text_file'] def getWord_wrap(self): return self.datain['word_wrap'] class SetTextFreetype2Properties(Baserequests): """Set the current properties of a Text Freetype 2 source. :Arguments: *source* type: String Source name. *color1* type: int (optional) Gradient top color. *color2* type: int (optional) Gradient bottom color. *custom_width* type: int (optional) Custom width (0 to disable). *drop_shadow* type: boolean (optional) Drop shadow. *font* type: Object (optional) Holds data for the font. Ex: `"font": { "face": "Arial", "flags": 0, "size": 150, "style": "" }` *font.face* type: String (optional) Font face. *font.flags* type: int (optional) Font text styling flag. `Bold=1, Italic=2, Bold Italic=3, Underline=5, Strikeout=8` *font.size* type: int (optional) Font text size. *font.style* type: String (optional) Font Style (unknown function). *from_file* type: boolean (optional) Read text from the specified file. *log_mode* type: boolean (optional) Chat log. *outline* type: boolean (optional) Outline. *text* type: String (optional) Text content to be displayed. *text_file* type: String (optional) File path. *word_wrap* type: boolean (optional) Word wrap. """ def __init__(self, source, color1=None, color2=None, custom_width=None, drop_shadow=None, font=None, from_file=None, log_mode=None, outline=None, text=None, text_file=None, word_wrap=None): Baserequests.__init__(self) self.name = 'SetTextFreetype2Properties' self.dataout['source'] = source self.dataout['color1'] = color1 self.dataout['color2'] = color2 self.dataout['custom_width'] = custom_width self.dataout['drop_shadow'] = drop_shadow self.dataout['font'] = font self.dataout['from_file'] = from_file self.dataout['log_mode'] = log_mode self.dataout['outline'] = outline self.dataout['text'] = text self.dataout['text_file'] = text_file self.dataout['word_wrap'] = word_wrap class GetBrowserSourceProperties(Baserequests): """Get current properties for a Browser Source. :Arguments: *source* type: String Source name. :Returns: *source* type: String Source name. *is_local_file* type: boolean Indicates that a local file is in use. *local_file* type: String file path. *url* type: String Url. *css* type: String CSS to inject. *width* type: int Width. *height* type: int Height. *fps* type: int Framerate. *shutdown* type: boolean Indicates whether the source should be shutdown when not visible. 
""" def __init__(self, source): Baserequests.__init__(self) self.name = 'GetBrowserSourceProperties' self.datain['source'] = None self.datain['is_local_file'] = None self.datain['local_file'] = None self.datain['url'] = None self.datain['css'] = None self.datain['width'] = None self.datain['height'] = None self.datain['fps'] = None self.datain['shutdown'] = None self.dataout['source'] = source def getSource(self): return self.datain['source'] def getIs_local_file(self): return self.datain['is_local_file'] def getLocal_file(self): return self.datain['local_file'] def getUrl(self): return self.datain['url'] def getCss(self): return self.datain['css'] def getWidth(self): return self.datain['width'] def getHeight(self): return self.datain['height'] def getFps(self): return self.datain['fps'] def getShutdown(self): return self.datain['shutdown'] class SetBrowserSourceProperties(Baserequests): """Set current properties for a Browser Source. :Arguments: *source* type: String Name of the source. *is_local_file* type: boolean (optional) Indicates that a local file is in use. *local_file* type: String (optional) file path. *url* type: String (optional) Url. *css* type: String (optional) CSS to inject. *width* type: int (optional) Width. *height* type: int (optional) Height. *fps* type: int (optional) Framerate. *shutdown* type: boolean (optional) Indicates whether the source should be shutdown when not visible. *render* type: boolean (optional) Visibility of the scene item. """ def __init__(self, source, is_local_file=None, local_file=None, url=None, css=None, width=None, height=None, fps=None, shutdown=None, render=None): Baserequests.__init__(self) self.name = 'SetBrowserSourceProperties' self.dataout['source'] = source self.dataout['is_local_file'] = is_local_file self.dataout['local_file'] = local_file self.dataout['url'] = url self.dataout['css'] = css self.dataout['width'] = width self.dataout['height'] = height self.dataout['fps'] = fps self.dataout['shutdown'] = shutdown self.dataout['render'] = render class GetSpecialSources(Baserequests): """Get configured special sources like Desktop Audio and Mic/Aux sources. :Returns: *desktop_1* type: String (optional) Name of the first Desktop Audio capture source. *desktop_2* type: String (optional) Name of the second Desktop Audio capture source. *mic_1* type: String (optional) Name of the first Mic/Aux input source. *mic_2* type: String (optional) Name of the second Mic/Aux input source. *mic_3* type: String (optional) NAme of the third Mic/Aux input source. 
""" def __init__(self): Baserequests.__init__(self) self.name = 'GetSpecialSources' self.datain['desktop-1'] = None self.datain['desktop-2'] = None self.datain['mic-1'] = None self.datain['mic-2'] = None self.datain['mic-3'] = None def getDesktop1(self): return self.datain['desktop-1'] def getDesktop2(self): return self.datain['desktop-2'] def getMic1(self): return self.datain['mic-1'] def getMic2(self): return self.datain['mic-2'] def getMic3(self): return self.datain['mic-3'] class GetSourceFilters(Baserequests): """List filters applied to a source :Arguments: *sourceName* type: String Source name :Returns: *filters* type: Array<Object> List of filters for the specified source *filters.*.enabled* type: Boolean Filter status (enabled or not) *filters.*.type* type: String Filter type *filters.*.name* type: String Filter name *filters.*.settings* type: Object Filter settings """ def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'GetSourceFilters' self.datain['filters'] = None self.dataout['sourceName'] = sourceName def getFilters(self): return self.datain['filters'] class GetSourceFilterInfo(Baserequests): """List filters applied to a source :Arguments: *sourceName* type: String Source name *filterName* type: String Source filter name :Returns: *enabled* type: Boolean Filter status (enabled or not) *type* type: String Filter type *name* type: String Filter name *settings* type: Object Filter settings """ def __init__(self, sourceName, filterName): Baserequests.__init__(self) self.name = 'GetSourceFilterInfo' self.datain['enabled'] = None self.datain['type'] = None self.datain['name'] = None self.datain['settings'] = None self.dataout['sourceName'] = sourceName self.dataout['filterName'] = filterName def getEnabled(self): return self.datain['enabled'] def getType(self): return self.datain['type'] def getName(self): return self.datain['name'] def getSettings(self): return self.datain['settings'] class AddFilterToSource(Baserequests): """Add a new filter to a source. Available source types along with their settings properties are available from `GetSourceTypesList`. 
:Arguments: *sourceName* type: String Name of the source on which the filter is added *filterName* type: String Name of the new filter *filterType* type: String Filter type *filterSettings* type: Object Filter settings """ def __init__(self, sourceName, filterName, filterType, filterSettings): Baserequests.__init__(self) self.name = 'AddFilterToSource' self.dataout['sourceName'] = sourceName self.dataout['filterName'] = filterName self.dataout['filterType'] = filterType self.dataout['filterSettings'] = filterSettings class RemoveFilterFromSource(Baserequests): """Remove a filter from a source :Arguments: *sourceName* type: String Name of the source from which the specified filter is removed *filterName* type: String Name of the filter to remove """ def __init__(self, sourceName, filterName): Baserequests.__init__(self) self.name = 'RemoveFilterFromSource' self.dataout['sourceName'] = sourceName self.dataout['filterName'] = filterName class ReorderSourceFilter(Baserequests): """Move a filter in the chain (absolute index positioning) :Arguments: *sourceName* type: String Name of the source to which the filter belongs *filterName* type: String Name of the filter to reorder *newIndex* type: Integer Desired position of the filter in the chain """ def __init__(self, sourceName, filterName, newIndex): Baserequests.__init__(self) self.name = 'ReorderSourceFilter' self.dataout['sourceName'] = sourceName self.dataout['filterName'] = filterName self.dataout['newIndex'] = newIndex class MoveSourceFilter(Baserequests): """Move a filter in the chain (relative positioning) :Arguments: *sourceName* type: String Name of the source to which the filter belongs *filterName* type: String Name of the filter to reorder *movementType* type: String How to move the filter around in the source's filter chain. Either "up", "down", "top" or "bottom". """ def __init__(self, sourceName, filterName, movementType): Baserequests.__init__(self) self.name = 'MoveSourceFilter' self.dataout['sourceName'] = sourceName self.dataout['filterName'] = filterName self.dataout['movementType'] = movementType class SetSourceFilterSettings(Baserequests): """Update settings of a filter :Arguments: *sourceName* type: String Name of the source to which the filter belongs *filterName* type: String Name of the filter to reconfigure *filterSettings* type: Object New settings. These will be merged to the current filter settings. """ def __init__(self, sourceName, filterName, filterSettings): Baserequests.__init__(self) self.name = 'SetSourceFilterSettings' self.dataout['sourceName'] = sourceName self.dataout['filterName'] = filterName self.dataout['filterSettings'] = filterSettings class SetSourceFilterVisibility(Baserequests): """Change the visibility/enabled state of a filter :Arguments: *sourceName* type: String Source name *filterName* type: String Source filter name *filterEnabled* type: Boolean New filter state """ def __init__(self, sourceName, filterName, filterEnabled): Baserequests.__init__(self) self.name = 'SetSourceFilterVisibility' self.dataout['sourceName'] = sourceName self.dataout['filterName'] = filterName self.dataout['filterEnabled'] = filterEnabled class GetAudioMonitorType(Baserequests): """Get the audio monitoring type of the specified source. :Arguments: *sourceName* type: String Source name. :Returns: *monitorType* type: String The monitor type in use. Options: `none`, `monitorOnly`, `monitorAndOutput`. 
""" def __init__(self, sourceName): Baserequests.__init__(self) self.name = 'GetAudioMonitorType' self.datain['monitorType'] = None self.dataout['sourceName'] = sourceName def getMonitorType(self): return self.datain['monitorType'] class SetAudioMonitorType(Baserequests): """Set the audio monitoring type of the specified source. :Arguments: *sourceName* type: String Source name. *monitorType* type: String The monitor type to use. Options: `none`, `monitorOnly`, `monitorAndOutput`. """ def __init__(self, sourceName, monitorType): Baserequests.__init__(self) self.name = 'SetAudioMonitorType' self.dataout['sourceName'] = sourceName self.dataout['monitorType'] = monitorType class TakeSourceScreenshot(Baserequests): """ At least `embedPictureFormat` or `saveToFilePath` must be specified. Clients can specify `width` and `height` parameters to receive scaled pictures. Aspect ratio is preserved if only one of these two parameters is specified. :Arguments: *sourceName* type: String (optional) Source name. Note that, since scenes are also sources, you can also provide a scene name. If not provided, the currently active scene is used. *embedPictureFormat* type: String (optional) Format of the Data URI encoded picture. Can be "png", "jpg", "jpeg" or "bmp" (or any other value supported by Qt's Image module) *saveToFilePath* type: String (optional) Full file path (file extension included) where the captured image is to be saved. Can be in a format different from `pictureFormat`. Can be a relative path. *fileFormat* type: String (optional) Format to save the image file as (one of the values provided in the `supported-image-export-formats` response field of `GetVersion`). If not specified, tries to guess based on file extension. *compressionQuality* type: int (optional) Compression ratio between -1 and 100 to write the image with. -1 is automatic, 1 is smallest file/most compression, 100 is largest file/least compression. Varies with image type. *width* type: int (optional) Screenshot width. Defaults to the source's base width. *height* type: int (optional) Screenshot height. Defaults to the source's base height. 
:Returns: *sourceName* type: String Source name *img* type: String Image Data URI (if `embedPictureFormat` was specified in the request) *imageFile* type: String Absolute path to the saved image file (if `saveToFilePath` was specified in the request) """ def __init__(self, sourceName=None, embedPictureFormat=None, saveToFilePath=None, fileFormat=None, compressionQuality=None, width=None, height=None): Baserequests.__init__(self) self.name = 'TakeSourceScreenshot' self.datain['sourceName'] = None self.datain['img'] = None self.datain['imageFile'] = None self.dataout['sourceName'] = sourceName self.dataout['embedPictureFormat'] = embedPictureFormat self.dataout['saveToFilePath'] = saveToFilePath self.dataout['fileFormat'] = fileFormat self.dataout['compressionQuality'] = compressionQuality self.dataout['width'] = width self.dataout['height'] = height def getSourceName(self): return self.datain['sourceName'] def getImg(self): return self.datain['img'] def getImageFile(self): return self.datain['imageFile'] class ListOutputs(Baserequests): """List existing outputs :Returns: *outputs* type: Array<Output> Outputs list """ def __init__(self): Baserequests.__init__(self) self.name = 'ListOutputs' self.datain['outputs'] = None def getOutputs(self): return self.datain['outputs'] class GetOutputInfo(Baserequests): """Get information about a single output :Arguments: *outputName* type: String Output name :Returns: *outputInfo* type: Output Output info """ def __init__(self, outputName): Baserequests.__init__(self) self.name = 'GetOutputInfo' self.datain['outputInfo'] = None self.dataout['outputName'] = outputName def getOutputInfo(self): return self.datain['outputInfo'] class StartOutput(Baserequests): """ Note: Controlling outputs is an experimental feature of obs-websocket. Some plugins which add outputs to OBS may not function properly when they are controlled in this way. :Arguments: *outputName* type: String Output name """ def __init__(self, outputName): Baserequests.__init__(self) self.name = 'StartOutput' self.dataout['outputName'] = outputName class StopOutput(Baserequests): """ Note: Controlling outputs is an experimental feature of obs-websocket. Some plugins which add outputs to OBS may not function properly when they are controlled in this way. :Arguments: *outputName* type: String Output name *force* type: boolean (optional) Force stop (default: false) """ def __init__(self, outputName, force=None): Baserequests.__init__(self) self.name = 'StopOutput' self.dataout['outputName'] = outputName self.dataout['force'] = force class SetCurrentProfile(Baserequests): """Set the currently active profile. :Arguments: *profile_name* type: String Name of the desired profile. """ def __init__(self, profile_name): Baserequests.__init__(self) self.name = 'SetCurrentProfile' self.dataout['profile-name'] = profile_name class GetCurrentProfile(Baserequests): """Get the name of the current profile. :Returns: *profile_name* type: String Name of the currently active profile. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetCurrentProfile' self.datain['profile-name'] = None def getProfileName(self): return self.datain['profile-name'] class ListProfiles(Baserequests): """Get a list of available profiles. :Returns: *profiles* type: Array<Object> List of available profiles. 
*profiles.*.profile_name* type: String Filter name """ def __init__(self): Baserequests.__init__(self) self.name = 'ListProfiles' self.datain['profiles'] = None def getProfiles(self): return self.datain['profiles'] class GetRecordingStatus(Baserequests): """Get current recording status. :Returns: *isRecording* type: boolean Current recording status. *isRecordingPaused* type: boolean Whether the recording is paused or not. *recordTimecode* type: String (optional) Time elapsed since recording started (only present if currently recording). *recordingFilename* type: String (optional) Absolute path to the recording file (only present if currently recording). """ def __init__(self): Baserequests.__init__(self) self.name = 'GetRecordingStatus' self.datain['isRecording'] = None self.datain['isRecordingPaused'] = None self.datain['recordTimecode'] = None self.datain['recordingFilename'] = None def getIsRecording(self): return self.datain['isRecording'] def getIsRecordingPaused(self): return self.datain['isRecordingPaused'] def getRecordTimecode(self): return self.datain['recordTimecode'] def getRecordingFilename(self): return self.datain['recordingFilename'] class StartStopRecording(Baserequests): """Toggle recording on or off (depending on the current recording state). """ def __init__(self): Baserequests.__init__(self) self.name = 'StartStopRecording' class StartRecording(Baserequests): """Start recording. Will return an `error` if recording is already active. """ def __init__(self): Baserequests.__init__(self) self.name = 'StartRecording' class StopRecording(Baserequests): """Stop recording. Will return an `error` if recording is not active. """ def __init__(self): Baserequests.__init__(self) self.name = 'StopRecording' class PauseRecording(Baserequests): """Pause the current recording. Returns an error if recording is not active or already paused. """ def __init__(self): Baserequests.__init__(self) self.name = 'PauseRecording' class ResumeRecording(Baserequests): """Resume/unpause the current recording (if paused). Returns an error if recording is not active or not paused. """ def __init__(self): Baserequests.__init__(self) self.name = 'ResumeRecording' class SetRecordingFolder(Baserequests): """ Please note: if `SetRecordingFolder` is called while a recording is in progress, the change won't be applied immediately and will be effective on the next recording. :Arguments: *rec_folder* type: String Path of the recording folder. """ def __init__(self, rec_folder): Baserequests.__init__(self) self.name = 'SetRecordingFolder' self.dataout['rec-folder'] = rec_folder class GetRecordingFolder(Baserequests): """Get the path of the current recording folder. :Returns: *rec_folder* type: String Path of the recording folder. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetRecordingFolder' self.datain['rec-folder'] = None def getRecFolder(self): return self.datain['rec-folder'] class GetReplayBufferStatus(Baserequests): """Get the status of the OBS replay buffer. :Returns: *isReplayBufferActive* type: boolean Current recording status. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetReplayBufferStatus' self.datain['isReplayBufferActive'] = None def getIsReplayBufferActive(self): return self.datain['isReplayBufferActive'] class StartStopReplayBuffer(Baserequests): """Toggle the Replay Buffer on/off (depending on the current state of the replay buffer). 
""" def __init__(self): Baserequests.__init__(self) self.name = 'StartStopReplayBuffer' class StartReplayBuffer(Baserequests): """Start recording into the Replay Buffer. Will return an `error` if the Replay Buffer is already active or if the "Save Replay Buffer" hotkey is not set in OBS' settings. Setting this hotkey is mandatory, even when triggering saves only through obs-websocket. """ def __init__(self): Baserequests.__init__(self) self.name = 'StartReplayBuffer' class StopReplayBuffer(Baserequests): """Stop recording into the Replay Buffer. Will return an `error` if the Replay Buffer is not active. """ def __init__(self): Baserequests.__init__(self) self.name = 'StopReplayBuffer' class SaveReplayBuffer(Baserequests): """Flush and save the contents of the Replay Buffer to disk. This is basically the same as triggering the "Save Replay Buffer" hotkey. Will return an `error` if the Replay Buffer is not active. """ def __init__(self): Baserequests.__init__(self) self.name = 'SaveReplayBuffer' class SetCurrentSceneCollection(Baserequests): """Change the active scene collection. :Arguments: *sc_name* type: String Name of the desired scene collection. """ def __init__(self, sc_name): Baserequests.__init__(self) self.name = 'SetCurrentSceneCollection' self.dataout['sc-name'] = sc_name class GetCurrentSceneCollection(Baserequests): """Get the name of the current scene collection. :Returns: *sc_name* type: String Name of the currently active scene collection. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetCurrentSceneCollection' self.datain['sc-name'] = None def getScName(self): return self.datain['sc-name'] class ListSceneCollections(Baserequests): """List available scene collections :Returns: *scene_collections* type: Array<String> Scene collections list *scene_collections.*.sc_name* type: String Scene collection name """ def __init__(self): Baserequests.__init__(self) self.name = 'ListSceneCollections' self.datain['scene-collections'] = None def getSceneCollections(self): return self.datain['scene-collections'] class GetSceneItemList(Baserequests): """Get a list of all scene items in a scene. :Arguments: *sceneName* type: String (optional) Name of the scene to get the list of scene items from. Defaults to the current scene if not specified. :Returns: *sceneName* type: String Name of the requested (or current) scene *sceneItems* type: Array<Object> Array of scene items *sceneItems.*.itemId* type: int Unique item id of the source item *sceneItems.*.sourceKind* type: String ID if the scene item's source. For example `vlc_source` or `image_source` *sceneItems.*.sourceName* type: String Name of the scene item's source *sceneItems.*.sourceType* type: String Type of the scene item's source. Either `input`, `group`, or `scene` """ def __init__(self, sceneName=None): Baserequests.__init__(self) self.name = 'GetSceneItemList' self.datain['sceneName'] = None self.datain['sceneItems'] = None self.dataout['sceneName'] = sceneName def getSceneName(self): return self.datain['sceneName'] def getSceneItems(self): return self.datain['sceneItems'] class GetSceneItemProperties(Baserequests): """Gets the scene specific properties of the specified source item. Coordinates are relative to the item's parent (the scene or group it belongs to). :Arguments: *scene_name* type: String (optional) Name of the scene the scene item belongs to. Defaults to the current scene. *item* type: String | Object Scene Item name (if this field is a string) or specification (if it is an object). 
*item.name* type: String (optional) Scene Item name (if the `item` field is an object) *item.id* type: int (optional) Scene Item ID (if the `item` field is an object) :Returns: *name* type: String Scene Item name. *itemId* type: int Scene Item ID. *position.x* type: double The x position of the source from the left. *position.y* type: double The y position of the source from the top. *position.alignment* type: int The point on the source that the item is manipulated from. The sum of 1=Left or 2=Right, and 4=Top or 8=Bottom, or omit to center on that axis. *rotation* type: double The clockwise rotation of the item in degrees around the point of alignment. *scale.x* type: double The x-scale factor of the source. *scale.y* type: double The y-scale factor of the source. *crop.top* type: int The number of pixels cropped off the top of the source before scaling. *crop.right* type: int The number of pixels cropped off the right of the source before scaling. *crop.bottom* type: int The number of pixels cropped off the bottom of the source before scaling. *crop.left* type: int The number of pixels cropped off the left of the source before scaling. *visible* type: bool If the source is visible. *muted* type: bool If the source is muted. *locked* type: bool If the source's transform is locked. *bounds.type* type: String Type of bounding box. Can be "OBS_BOUNDS_STRETCH", "OBS_BOUNDS_SCALE_INNER", "OBS_BOUNDS_SCALE_OUTER", "OBS_BOUNDS_SCALE_TO_WIDTH", "OBS_BOUNDS_SCALE_TO_HEIGHT", "OBS_BOUNDS_MAX_ONLY" or "OBS_BOUNDS_NONE". *bounds.alignment* type: int Alignment of the bounding box. *bounds.x* type: double Width of the bounding box. *bounds.y* type: double Height of the bounding box. *sourceWidth* type: int Base width (without scaling) of the source *sourceHeight* type: int Base source (without scaling) of the source *width* type: double Scene item width (base source width multiplied by the horizontal scaling factor) *height* type: double Scene item height (base source height multiplied by the vertical scaling factor) *parentGroupName* type: String (optional) Name of the item's parent (if this item belongs to a group) *groupChildren* type: Array<SceneItemTransform> (optional) List of children (if this item is a group) """ def __init__(self, item, scene_name=None): Baserequests.__init__(self) self.name = 'GetSceneItemProperties' self.datain['name'] = None self.datain['itemId'] = None self.datain['position'] = None self.datain['rotation'] = None self.datain['scale'] = None self.datain['crop'] = None self.datain['visible'] = None self.datain['muted'] = None self.datain['locked'] = None self.datain['bounds'] = None self.datain['sourceWidth'] = None self.datain['sourceHeight'] = None self.datain['width'] = None self.datain['height'] = None self.datain['parentGroupName'] = None self.datain['groupChildren'] = None self.dataout['item'] = item self.dataout['scene-name'] = scene_name def getName(self): return self.datain['name'] def getItemId(self): return self.datain['itemId'] def getPosition(self): return self.datain['position'] def getRotation(self): return self.datain['rotation'] def getScale(self): return self.datain['scale'] def getCrop(self): return self.datain['crop'] def getVisible(self): return self.datain['visible'] def getMuted(self): return self.datain['muted'] def getLocked(self): return self.datain['locked'] def getBounds(self): return self.datain['bounds'] def getSourceWidth(self): return self.datain['sourceWidth'] def getSourceHeight(self): return self.datain['sourceHeight'] def getWidth(self): 
return self.datain['width'] def getHeight(self): return self.datain['height'] def getParentGroupName(self): return self.datain['parentGroupName'] def getGroupChildren(self): return self.datain['groupChildren'] class SetSceneItemProperties(Baserequests): """Sets the scene specific properties of a source. Unspecified properties will remain unchanged. Coordinates are relative to the item's parent (the scene or group it belongs to). :Arguments: *scene_name* type: String (optional) Name of the scene the source item belongs to. Defaults to the current scene. *item* type: String | Object Scene Item name (if this field is a string) or specification (if it is an object). *item.name* type: String (optional) Scene Item name (if the `item` field is an object) *item.id* type: int (optional) Scene Item ID (if the `item` field is an object) *position.x* type: double (optional) The new x position of the source. *position.y* type: double (optional) The new y position of the source. *position.alignment* type: int (optional) The new alignment of the source. *rotation* type: double (optional) The new clockwise rotation of the item in degrees. *scale.x* type: double (optional) The new x scale of the item. *scale.y* type: double (optional) The new y scale of the item. *crop.top* type: int (optional) The new amount of pixels cropped off the top of the source before scaling. *crop.bottom* type: int (optional) The new amount of pixels cropped off the bottom of the source before scaling. *crop.left* type: int (optional) The new amount of pixels cropped off the left of the source before scaling. *crop.right* type: int (optional) The new amount of pixels cropped off the right of the source before scaling. *visible* type: bool (optional) The new visibility of the source. 'true' shows source, 'false' hides source. *locked* type: bool (optional) The new locked status of the source. 'true' keeps it in its current position, 'false' allows movement. *bounds.type* type: String (optional) The new bounds type of the source. Can be "OBS_BOUNDS_STRETCH", "OBS_BOUNDS_SCALE_INNER", "OBS_BOUNDS_SCALE_OUTER", "OBS_BOUNDS_SCALE_TO_WIDTH", "OBS_BOUNDS_SCALE_TO_HEIGHT", "OBS_BOUNDS_MAX_ONLY" or "OBS_BOUNDS_NONE". *bounds.alignment* type: int (optional) The new alignment of the bounding box. (0-2, 4-6, 8-10) *bounds.x* type: double (optional) The new width of the bounding box. *bounds.y* type: double (optional) The new height of the bounding box. """ def __init__(self, item, scene_name=None, position=None, rotation=None, scale=None, crop=None, visible=None, locked=None, bounds=None): Baserequests.__init__(self) self.name = 'SetSceneItemProperties' self.dataout['item'] = item self.dataout['scene-name'] = scene_name self.dataout['position'] = position self.dataout['rotation'] = rotation self.dataout['scale'] = scale self.dataout['crop'] = crop self.dataout['visible'] = visible self.dataout['locked'] = locked self.dataout['bounds'] = bounds class ResetSceneItem(Baserequests): """Reset a scene item. :Arguments: *scene_name* type: String (optional) Name of the scene the scene item belongs to. Defaults to the current scene. *item* type: String | Object Scene Item name (if this field is a string) or specification (if it is an object). 
*item.name* type: String (optional) Scene Item name (if the `item` field is an object) *item.id* type: int (optional) Scene Item ID (if the `item` field is an object) """ def __init__(self, item, scene_name=None): Baserequests.__init__(self) self.name = 'ResetSceneItem' self.dataout['item'] = item self.dataout['scene-name'] = scene_name class SetSceneItemRender(Baserequests): """Show or hide a specified source item in a specified scene. :Arguments: *scene_name* type: String (optional) Name of the scene the scene item belongs to. Defaults to the currently active scene. *source* type: String Scene Item name. *render* type: boolean true = shown ; false = hidden """ def __init__(self, source, render, scene_name=None): Baserequests.__init__(self) self.name = 'SetSceneItemRender' self.dataout['source'] = source self.dataout['render'] = render self.dataout['scene-name'] = scene_name class SetSceneItemPosition(Baserequests): """Sets the coordinates of a specified source item. :Arguments: *scene_name* type: String (optional) Name of the scene the scene item belongs to. Defaults to the current scene. *item* type: String Scene Item name. *x* type: double X coordinate. *y* type: double Y coordinate. """ def __init__(self, item, x, y, scene_name=None): Baserequests.__init__(self) self.name = 'SetSceneItemPosition' self.dataout['item'] = item self.dataout['x'] = x self.dataout['y'] = y self.dataout['scene-name'] = scene_name class SetSceneItemTransform(Baserequests): """Set the transform of the specified source item. :Arguments: *scene_name* type: String (optional) Name of the scene the scene item belongs to. Defaults to the current scene. *item* type: String Scene Item name. *x_scale* type: double Width scale factor. *y_scale* type: double Height scale factor. *rotation* type: double Source item rotation (in degrees). """ def __init__(self, item, x_scale, y_scale, rotation, scene_name=None): Baserequests.__init__(self) self.name = 'SetSceneItemTransform' self.dataout['item'] = item self.dataout['x-scale'] = x_scale self.dataout['y-scale'] = y_scale self.dataout['rotation'] = rotation self.dataout['scene-name'] = scene_name class SetSceneItemCrop(Baserequests): """Sets the crop coordinates of the specified source item. :Arguments: *scene_name* type: String (optional) Name of the scene the scene item belongs to. Defaults to the current scene. *item* type: String Scene Item name. *top* type: int Pixel position of the top of the source item. *bottom* type: int Pixel position of the bottom of the source item. *left* type: int Pixel position of the left of the source item. *right* type: int Pixel position of the right of the source item. """ def __init__(self, item, top, bottom, left, right, scene_name=None): Baserequests.__init__(self) self.name = 'SetSceneItemCrop' self.dataout['item'] = item self.dataout['top'] = top self.dataout['bottom'] = bottom self.dataout['left'] = left self.dataout['right'] = right self.dataout['scene-name'] = scene_name class DeleteSceneItem(Baserequests): """Deletes a scene item. :Arguments: *scene* type: String (optional) Name of the scene the scene item belongs to. Defaults to the current scene. *item* type: Object Scene item to delete (required) *item.name* type: String Scene Item name (prefer `id`, including both is acceptable). *item.id* type: int Scene Item ID. 
""" def __init__(self, item, scene=None): Baserequests.__init__(self) self.name = 'DeleteSceneItem' self.dataout['item'] = item self.dataout['scene'] = scene class AddSceneItem(Baserequests): """Creates a scene item in a scene. In other words, this is how you add a source into a scene. :Arguments: *sceneName* type: String Name of the scene to create the scene item in *sourceName* type: String Name of the source to be added *setVisible* type: boolean Whether to make the sceneitem visible on creation or not. Default `true` :Returns: *itemId* type: int Numerical ID of the created scene item """ def __init__(self, sceneName, sourceName, setVisible): Baserequests.__init__(self) self.name = 'AddSceneItem' self.datain['itemId'] = None self.dataout['sceneName'] = sceneName self.dataout['sourceName'] = sourceName self.dataout['setVisible'] = setVisible def getItemId(self): return self.datain['itemId'] class DuplicateSceneItem(Baserequests): """Duplicates a scene item. :Arguments: *fromScene* type: String (optional) Name of the scene to copy the item from. Defaults to the current scene. *toScene* type: String (optional) Name of the scene to create the item in. Defaults to the current scene. *item* type: Object Scene Item to duplicate from the source scene (required) *item.name* type: String Scene Item name (prefer `id`, including both is acceptable). *item.id* type: int Scene Item ID. :Returns: *scene* type: String Name of the scene where the new item was created *item* type: Object New item info *item.id* type: int New item ID *item.name* type: String New item name """ def __init__(self, item, fromScene=None, toScene=None): Baserequests.__init__(self) self.name = 'DuplicateSceneItem' self.datain['scene'] = None self.datain['item'] = None self.dataout['item'] = item self.dataout['fromScene'] = fromScene self.dataout['toScene'] = toScene def getScene(self): return self.datain['scene'] def getItem(self): return self.datain['item'] class SetCurrentScene(Baserequests): """Switch to the specified scene. :Arguments: *scene_name* type: String Name of the scene to switch to. """ def __init__(self, scene_name): Baserequests.__init__(self) self.name = 'SetCurrentScene' self.dataout['scene-name'] = scene_name class GetCurrentScene(Baserequests): """Get the current scene's name and source items. :Returns: *name* type: String Name of the currently active scene. *sources* type: Array<SceneItem> Ordered list of the current scene's source items. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetCurrentScene' self.datain['name'] = None self.datain['sources'] = None def getName(self): return self.datain['name'] def getSources(self): return self.datain['sources'] class GetSceneList(Baserequests): """Get a list of scenes in the currently active profile. :Returns: *current_scene* type: String Name of the currently active scene. *scenes* type: Array<Scene> Ordered list of the current profile's scenes (See [GetCurrentScene](#getcurrentscene) for more information). """ def __init__(self): Baserequests.__init__(self) self.name = 'GetSceneList' self.datain['current-scene'] = None self.datain['scenes'] = None def getCurrentScene(self): return self.datain['current-scene'] def getScenes(self): return self.datain['scenes'] class CreateScene(Baserequests): """Create a new scene scene. :Arguments: *sceneName* type: String Name of the scene to create. 
""" def __init__(self, sceneName): Baserequests.__init__(self) self.name = 'CreateScene' self.dataout['sceneName'] = sceneName class ReorderSceneItems(Baserequests): """Changes the order of scene items in the requested scene. :Arguments: *scene* type: String (optional) Name of the scene to reorder (defaults to current). *items* type: Array<Scene> Ordered list of objects with name and/or id specified. Id preferred due to uniqueness per scene *items.*.id* type: int (optional) Id of a specific scene item. Unique on a scene by scene basis. *items.*.name* type: String (optional) Name of a scene item. Sufficiently unique if no scene items share sources within the scene. """ def __init__(self, items, scene=None): Baserequests.__init__(self) self.name = 'ReorderSceneItems' self.dataout['items'] = items self.dataout['scene'] = scene class SetSceneTransitionOverride(Baserequests): """Set a scene to use a specific transition override. :Arguments: *sceneName* type: String Name of the scene to switch to. *transitionName* type: String Name of the transition to use. *transitionDuration* type: int (Optional) Duration in milliseconds of the transition if transition is not fixed. Defaults to the current duration specified in the UI if there is no current override and this value is not given. """ def __init__(self, sceneName, transitionName, transitionDuration): Baserequests.__init__(self) self.name = 'SetSceneTransitionOverride' self.dataout['sceneName'] = sceneName self.dataout['transitionName'] = transitionName self.dataout['transitionDuration'] = transitionDuration class RemoveSceneTransitionOverride(Baserequests): """Remove any transition override on a scene. :Arguments: *sceneName* type: String Name of the scene to switch to. """ def __init__(self, sceneName): Baserequests.__init__(self) self.name = 'RemoveSceneTransitionOverride' self.dataout['sceneName'] = sceneName class GetSceneTransitionOverride(Baserequests): """Get the current scene transition override. :Arguments: *sceneName* type: String Name of the scene to switch to. :Returns: *transitionName* type: String Name of the current overriding transition. Empty string if no override is set. *transitionDuration* type: int Transition duration. `-1` if no override is set. """ def __init__(self, sceneName): Baserequests.__init__(self) self.name = 'GetSceneTransitionOverride' self.datain['transitionName'] = None self.datain['transitionDuration'] = None self.dataout['sceneName'] = sceneName def getTransitionName(self): return self.datain['transitionName'] def getTransitionDuration(self): return self.datain['transitionDuration'] class GetStreamingStatus(Baserequests): """Get current streaming and recording status. :Returns: *streaming* type: boolean Current streaming status. *recording* type: boolean Current recording status. *stream_timecode* type: String (optional) Time elapsed since streaming started (only present if currently streaming). *rec_timecode* type: String (optional) Time elapsed since recording started (only present if currently recording). *preview_only* type: boolean Always false. Retrocompatibility with OBSRemote. 
""" def __init__(self): Baserequests.__init__(self) self.name = 'GetStreamingStatus' self.datain['streaming'] = None self.datain['recording'] = None self.datain['stream-timecode'] = None self.datain['rec-timecode'] = None self.datain['preview-only'] = None def getStreaming(self): return self.datain['streaming'] def getRecording(self): return self.datain['recording'] def getStreamTimecode(self): return self.datain['stream-timecode'] def getRecTimecode(self): return self.datain['rec-timecode'] def getPreviewOnly(self): return self.datain['preview-only'] class StartStopStreaming(Baserequests): """Toggle streaming on or off (depending on the current stream state). """ def __init__(self): Baserequests.__init__(self) self.name = 'StartStopStreaming' class StartStreaming(Baserequests): """Start streaming. Will return an `error` if streaming is already active. :Arguments: *stream* type: Object (optional) Special stream configuration. Please note: these won't be saved to OBS' configuration. *stream.type* type: String (optional) If specified ensures the type of stream matches the given type (usually 'rtmp_custom' or 'rtmp_common'). If the currently configured stream type does not match the given stream type, all settings must be specified in the `settings` object or an error will occur when starting the stream. *stream.metadata* type: Object (optional) Adds the given object parameters as encoded query string parameters to the 'key' of the RTMP stream. Used to pass data to the RTMP service about the streaming. May be any String, Numeric, or Boolean field. *stream.settings* type: Object (optional) Settings for the stream. *stream.settings.server* type: String (optional) The publish URL. *stream.settings.key* type: String (optional) The publish key of the stream. *stream.settings.use_auth* type: boolean (optional) Indicates whether authentication should be used when connecting to the streaming server. *stream.settings.username* type: String (optional) If authentication is enabled, the username for the streaming server. Ignored if `use_auth` is not set to `true`. *stream.settings.password* type: String (optional) If authentication is enabled, the password for the streaming server. Ignored if `use_auth` is not set to `true`. """ def __init__(self, stream=None): Baserequests.__init__(self) self.name = 'StartStreaming' self.dataout['stream'] = stream class StopStreaming(Baserequests): """Stop streaming. Will return an `error` if streaming is not active. """ def __init__(self): Baserequests.__init__(self) self.name = 'StopStreaming' class SetStreamSettings(Baserequests): """Sets one or more attributes of the current streaming server settings. Any options not passed will remain unchanged. Returns the updated settings in response. If 'type' is different than the current streaming service type, all settings are required. Returns the full settings of the stream (the same as GetStreamSettings). :Arguments: *type* type: String The type of streaming service configuration, usually `rtmp_custom` or `rtmp_common`. *settings* type: Object The actual settings of the stream. *settings.server* type: String (optional) The publish URL. *settings.key* type: String (optional) The publish key. *settings.use_auth* type: boolean (optional) Indicates whether authentication should be used when connecting to the streaming server. *settings.username* type: String (optional) The username for the streaming service. *settings.password* type: String (optional) The password for the streaming service. 
*save* type: boolean Persist the settings to disk. """ def __init__(self, type, settings, save): Baserequests.__init__(self) self.name = 'SetStreamSettings' self.dataout['type'] = type self.dataout['settings'] = settings self.dataout['save'] = save class GetStreamSettings(Baserequests): """Get the current streaming server settings. :Returns: *type* type: String The type of streaming service configuration. Possible values: 'rtmp_custom' or 'rtmp_common'. *settings* type: Object Stream settings object. *settings.server* type: String The publish URL. *settings.key* type: String The publish key of the stream. *settings.use_auth* type: boolean Indicates whether authentication should be used when connecting to the streaming server. *settings.username* type: String The username to use when accessing the streaming server. Only present if `use_auth` is `true`. *settings.password* type: String The password to use when accessing the streaming server. Only present if `use_auth` is `true`. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetStreamSettings' self.datain['type'] = None self.datain['settings'] = None def getType(self): return self.datain['type'] def getSettings(self): return self.datain['settings'] class SaveStreamSettings(Baserequests): """Save the current streaming server settings to disk. """ def __init__(self): Baserequests.__init__(self) self.name = 'SaveStreamSettings' class SendCaptions(Baserequests): """Send the provided text as embedded CEA-608 caption data. :Arguments: *text* type: String Captions text """ def __init__(self, text): Baserequests.__init__(self) self.name = 'SendCaptions' self.dataout['text'] = text class GetStudioModeStatus(Baserequests): """Indicates if Studio Mode is currently enabled. :Returns: *studio_mode* type: boolean Indicates if Studio Mode is enabled. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetStudioModeStatus' self.datain['studio-mode'] = None def getStudioMode(self): return self.datain['studio-mode'] class GetPreviewScene(Baserequests): """Get the name of the currently previewed scene and its list of sources. Will return an `error` if Studio Mode is not enabled. :Returns: *name* type: String The name of the active preview scene. *sources* type: Array<SceneItem> """ def __init__(self): Baserequests.__init__(self) self.name = 'GetPreviewScene' self.datain['name'] = None self.datain['sources'] = None def getName(self): return self.datain['name'] def getSources(self): return self.datain['sources'] class SetPreviewScene(Baserequests): """Set the active preview scene. Will return an `error` if Studio Mode is not enabled. :Arguments: *scene_name* type: String The name of the scene to preview. """ def __init__(self, scene_name): Baserequests.__init__(self) self.name = 'SetPreviewScene' self.dataout['scene-name'] = scene_name class TransitionToProgram(Baserequests): """Transitions the currently previewed scene to the main output. Will return an `error` if Studio Mode is not enabled. :Arguments: *with_transition* type: Object (optional) Change the active transition before switching scenes. Defaults to the active transition. *with_transition.name* type: String Name of the transition. *with_transition.duration* type: int (optional) Transition duration (in milliseconds). """ def __init__(self, with_transition=None): Baserequests.__init__(self) self.name = 'TransitionToProgram' self.dataout['with-transition'] = with_transition class EnableStudioMode(Baserequests): """Enables Studio Mode. 
""" def __init__(self): Baserequests.__init__(self) self.name = 'EnableStudioMode' class DisableStudioMode(Baserequests): """Disables Studio Mode. """ def __init__(self): Baserequests.__init__(self) self.name = 'DisableStudioMode' class ToggleStudioMode(Baserequests): """Toggles Studio Mode (depending on the current state of studio mode). """ def __init__(self): Baserequests.__init__(self) self.name = 'ToggleStudioMode' class GetTransitionList(Baserequests): """List of all transitions available in the frontend's dropdown menu. :Returns: *current_transition* type: String Name of the currently active transition. *transitions* type: Array<Object> List of transitions. *transitions.*.name* type: String Name of the transition. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetTransitionList' self.datain['current-transition'] = None self.datain['transitions'] = None def getCurrentTransition(self): return self.datain['current-transition'] def getTransitions(self): return self.datain['transitions'] class GetCurrentTransition(Baserequests): """Get the name of the currently selected transition in the frontend's dropdown menu. :Returns: *name* type: String Name of the selected transition. *duration* type: int (optional) Transition duration (in milliseconds) if supported by the transition. """ def __init__(self): Baserequests.__init__(self) self.name = 'GetCurrentTransition' self.datain['name'] = None self.datain['duration'] = None def getName(self): return self.datain['name'] def getDuration(self): return self.datain['duration'] class SetCurrentTransition(Baserequests): """Set the active transition. :Arguments: *transition_name* type: String The name of the transition. """ def __init__(self, transition_name): Baserequests.__init__(self) self.name = 'SetCurrentTransition' self.dataout['transition-name'] = transition_name class SetTransitionDuration(Baserequests): """Set the duration of the currently selected transition if supported. :Arguments: *duration* type: int Desired duration of the transition (in milliseconds). """ def __init__(self, duration): Baserequests.__init__(self) self.name = 'SetTransitionDuration' self.dataout['duration'] = duration class GetTransitionDuration(Baserequests): """Get the duration of the currently selected transition if supported. :Returns: *transition_duration* type: int Duration of the current transition (in milliseconds). """ def __init__(self): Baserequests.__init__(self) self.name = 'GetTransitionDuration' self.datain['transition-duration'] = None def getTransitionDuration(self): return self.datain['transition-duration'] class GetTransitionPosition(Baserequests): """Get the position of the current transition. :Returns: *position* type: double current transition position. This value will be between 0.0 and 1.0. Note: Transition returns 1.0 when not active. 
""" def __init__(self): Baserequests.__init__(self) self.name = 'GetTransitionPosition' self.datain['position'] = None def getPosition(self): return self.datain['position'] class GetTransitionSettings(Baserequests): """Get the current settings of a transition :Arguments: *transitionName* type: String Transition name :Returns: *transitionSettings* type: Object Current transition settings """ def __init__(self, transitionName): Baserequests.__init__(self) self.name = 'GetTransitionSettings' self.datain['transitionSettings'] = None self.dataout['transitionName'] = transitionName def getTransitionSettings(self): return self.datain['transitionSettings'] class SetTransitionSettings(Baserequests): """Change the current settings of a transition :Arguments: *transitionName* type: String Transition name *transitionSettings* type: Object Transition settings (they can be partial) :Returns: *transitionSettings* type: Object Updated transition settings """ def __init__(self, transitionName, transitionSettings): Baserequests.__init__(self) self.name = 'SetTransitionSettings' self.datain['transitionSettings'] = None self.dataout['transitionName'] = transitionName self.dataout['transitionSettings'] = transitionSettings def getTransitionSettings(self): return self.datain['transitionSettings'] class ReleaseTBar(Baserequests): """Release the T-Bar (like a user releasing their mouse button after moving it). *YOU MUST CALL THIS if you called `SetTBarPosition` with the `release` parameter set to `false`.* """ def __init__(self): Baserequests.__init__(self) self.name = 'ReleaseTBar' class SetTBarPosition(Baserequests): """ If your code needs to perform multiple successive T-Bar moves (e.g. : in an animation, or in response to a user moving a T-Bar control in your User Interface), set `release` to false and call `ReleaseTBar` later once the animation/interaction is over. :Arguments: *position* type: double T-Bar position. This value must be between 0.0 and 1.0. *release* type: boolean (optional) Whether or not the T-Bar gets released automatically after setting its new position (like a user releasing their mouse button after moving the T-Bar). Call `ReleaseTBar` manually if you set `release` to false. Defaults to true. """ def __init__(self, position, release=None): Baserequests.__init__(self) self.name = 'SetTBarPosition' self.dataout['position'] = position self.dataout['release'] = release
1.453125
1
flask__webservers/bootstrap_4__toggle_switch__examples/main.py
DazEB2/SimplePyScripts
0
3328
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = 'ipetrash'


# SOURCE: https://github.com/twbs/bootstrap
# SOURCE: https://github.com/gitbrent/bootstrap4-toggle
# SOURCE: https://gitbrent.github.io/bootstrap4-toggle/


from flask import Flask, render_template

app = Flask(__name__)

import logging
logging.basicConfig(level=logging.DEBUG)


@app.route("/")
def index():
    return render_template('index.html')


if __name__ == '__main__':
    app.debug = True

    # Localhost
    # port=0 -- random free port
    # app.run(port=0)
    app.run(
        port=5000,

        # :param threaded: should the process handle each request in a separate
        #                  thread?
        # :param processes: if greater than 1 then handle each request in a new process
        #                   up to this maximum number of concurrent processes.
        threaded=True,
    )

    # # Public IP
    # app.run(host='0.0.0.0')
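# Hedged note (illustrative, not part of the original script): render_template()
# expects a Jinja2 template at templates/index.html alongside this file. Any
# minimal page is enough to exercise the route at http://127.0.0.1:5000/, e.g.:
#
#     <!-- templates/index.html (hypothetical placeholder) -->
#     <h1>bootstrap4-toggle demo</h1>
#     <input type="checkbox" data-toggle="toggle">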
1.414063
1
twitoff/twitter.py
ChristopherKchilton/twitoff-ChristopherKchilton
1
3368
"""Retrieve and request tweets from the DS API""" import requests import spacy from .models import DB, Tweet, User nlp = spacy.load("my_model") def vectorize_tweet(tweet_text): return nlp(tweet_text).vector # Add and updates tweets def add_or_update_user(username): """Adds and updates the user with twiter handle 'username' to our database """ #TODO: Figure out try: r = requests.get( f"https://lambda-ds-twit-assist.herokuapp.com/user/{username}") user = r.json() user_id = user["twitter_handle"]["id"] # print(user) # This is either respectively grabs or creates a user for our db db_user = (User.query.get(user_id)) or User(id=user_id, name=username) # This adds the db_user to our database DB.session.add(db_user) tweets = user["tweets"] # if tweets: # db_user.newest_tweet_id = tweets[0].id for tweet in tweets: tweet_vector = vectorize_tweet(tweet["full_text"]) tweet_id = tweet["id"] db_tweet = (Tweet.query.get(tweet_id)) or Tweet( id=tweet["id"], text=tweet["full_text"], vect=tweet_vector) db_user.tweets.append(db_tweet) DB.session.add(db_tweet) except Exception as e: print("Error processing {}: {}".format(username, e)) raise e else: DB.session.commit()
2.546875
3
token_train/quickdemo(1)(1).py
Tatsuya26/processamento_de_linguagens
0
3384
import ply.lex as lex

# Token names exposed to ply.
tokens = ["NUM", "OPERADORES"]

# Raw strings avoid invalid-escape warnings; the operator class matches +, * or -.
t_NUM = r'\d+'
t_OPERADORES = r'[+*-]'

# Ignore newlines, tabs and spaces.
t_ignore = '\n\t '

def t_error(t):
    print("Erro")
    print(t)
    t.lexer.skip(1)  # skip the offending character so lexing can continue

lexer = lex.lex()

# Example inputs:
# 1+2 1-2 1*2
# ola mundo
import sys
for line in sys.stdin:
    lexer.input(line)
    for tok in lexer:
        print(tok)
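# Hedged sample run (assuming the lexer above): for an input line "1+2" the
# loop prints tokens roughly of the form
#     LexToken(NUM,'1',1,0)
#     LexToken(OPERADORES,'+',1,1)
#     LexToken(NUM,'2',1,2)
# while letters (e.g. "ola mundo") are reported through t_error.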
2.265625
2
cogs/owner.py
Obsidian-Development/JDBot
0
3416
from discord.ext import commands, menus import utils import random , discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy import traceback, textwrap from discord.ext.menus.views import ViewMenuPages class Owner(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command(brief="a command to send mail") async def mail(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.reply("User not found, returning Letter") user = ctx.author if user: await ctx.reply("Please give me a message to use.") message = await self.bot.wait_for("message",check = utils.check(ctx)) embed_message = discord.Embed(title=message.content, timestamp=(message.created_at), color=random.randint(0, 16777215)) embed_message.set_author(name=f"Mail from: {ctx.author}",icon_url=(ctx.author.display_avatar.url)) embed_message.set_footer(text = f"{ctx.author.id}") embed_message.set_thumbnail(url = "https://i.imgur.com/1XvDnqC.png") if (user.dm_channel is None): await user.create_dm() try: await user.send(embed=embed_message) except: user = ctx.author await user.send(content="Message failed. sending",embed=embed_message) embed_message.add_field(name="Sent To:",value=str(user)) await self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def load(self, ctx, *, cog = None): if cog: try: self.bot.load_extension(cog) except Exception as e: await ctx.send(e) traceback.print_exc() await ctx.send("Loaded cog(see if there's any errors)") if cog is None: await ctx.send("you can't ask to load no cogs.") @commands.command() async def reload(self, ctx, *, cog = None): cog = cog or "all" if cog == "all": for x in list(self.bot.extensions): try: self.bot.reload_extension(x) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send("done reloading all cogs(check for any errors)") else: try: self.bot.reload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send("Cog reloaded :D (check for any errors)") @commands.command() async def unload(self, ctx, *, cog = None): if cog: try: self.bot.unload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send("Cog should be unloaded just fine :D.(check any errors)") if cog is None: await ctx.send("you can't ask to reload no cogs") @commands.command() async def shutdown(self, ctx): await ctx.send("shutdown/logout time happening.") await self.bot.close() async def cog_check(self, ctx): return await self.bot.is_owner(ctx.author) async def cog_command_error(self, ctx, error): if ctx.command or not ctx.command.has_error_handler(): await ctx.send(error) traceback.print_exc() #I need to fix all cog_command_error @commands.command(brief="Changes Bot Status(Owner Only)") async def status(self , ctx , * , args=None): if await self.bot.is_owner(ctx.author): if args: await self.bot.change_presence(status=discord.Status.do_not_disturb, activity= discord.Activity(type=discord.ActivityType.watching,name=args)) if args is None: await self.bot.change_presence(status=discord.Status.do_not_disturb) if await self.bot.is_owner(ctx.author) is False: await ctx.send("That's an owner only command") @commands.command(brief="Only owner command to change bot's nickname") async def change_nick(self, ctx ,*, name=None): if await self.bot.is_owner(ctx.author): if isinstance(ctx.channel, discord.TextChannel): await ctx.send("Changing Nickname") try: await ctx.guild.me.edit(nick=name) except 
discord.Forbidden: await ctx.send("Appears not to have valid perms") if isinstance(ctx.channel,discord.DMChannel): await ctx.send("You can't use that in Dms.") if await self.bot.is_owner(ctx.author) is False: await ctx.send("You can't use that command") class ServersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed = discord.Embed(title="Servers:",description=item,color=random.randint(0, 16777215)) return embed @commands.command(brief="a command to give a list of servers(owner only)",help="Gives a list of guilds(Bot Owners only)") async def servers(self, ctx): if await self.bot.is_owner(ctx.author): pag = commands.Paginator() for g in self.bot.guilds: pag.add_line(f"[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}") pages = [page.strip("`") for page in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) if await self.bot.is_owner(ctx.author) is False: await ctx.send("You can't use that it's owner only") @commands.command(brief="only works with JDJG, but this command is meant to send updates to my webhook") async def webhook_update(self, ctx, *, args = None): if await self.bot.is_owner(ctx.author): if args: if isinstance(ctx.channel, discord.TextChannel): try: await ctx.message.delete() except: await ctx.send("It couldn't delete the message in this guils so, I kept it here.") webhook = discord.Webhook.from_url(os.environ["webhook1"], session = self.bot.session) embed=discord.Embed(title="Update",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name="Update Info:",value=args) embed.set_author(name="<NAME>",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text="JDJG's Updates") await webhook.send(embed=embed) webhook=discord.Webhook.from_url(os.environ["webhook99"], session = self.bot.session) embed=discord.Embed(title="Update",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name="Update Info:",value=args) embed.set_author(name="<NAME>",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text="JDJG's Updates") await webhook.send(embed=embed) if args is None: await ctx.send("You sadly can't use it like that.") if await self.bot.is_owner(ctx.author) is False: await ctx.send("You can't use that") @commands.command(brief="Commands to see what guilds a person is in.") async def mutualguilds(self, ctx, *, user: utils.BetterUserconverter = None): user = user or ctx.author pag = commands.Paginator() for g in user.mutual_guilds: pag.add_line(f"{g}") pages = [page.strip("`") for page in pag.pages] pages = pages or ["No shared servers"] menu = ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after = True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) @commands.command(brief="A command to add sus_users with a reason") async def addsus(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.send("can't have a user be none.") if user: await ctx.reply("Please give me a reason why:") reason = await self.bot.wait_for("message",check= utils.check(ctx)) cur = await self.bot.sus_users.cursor() await cur.execute("INSERT INTO sus_users VALUES (?, ?)", (user.id, reason.content)) await self.bot.sus_users.commit() await cur.close() await ctx.send("added sus users, succesfully") 
@commands.command(brief="a command to remove sus users.") async def removesus(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.send("You can't have a none user.") if user: cur = await self.bot.sus_users.cursor() await cur.execute("DELETE FROM sus_users WHERE user_id = ?", (user.id,)) await self.bot.sus_users.commit() await cur.close() await ctx.send("Removed sus users.") class SusUsersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed=discord.Embed(title = "Users Deemed Suspicious by JDJG Inc. Official", color = random.randint(0, 16777215)) embed.add_field(name = f"User ID : {item[0]}", value = f"**Reason :** {item[1]}", inline = False) return embed @commands.command(brief="a command to grab all in the sus_users list") async def sus_users(self, ctx): cur = await self.bot.sus_users.cursor() cursor = await cur.execute("SELECT * FROM SUS_USERS;") sus_users = tuple(await cursor.fetchall()) await cur.close() await self.bot.sus_users.commit() menu = ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1),delete_message_after=True) await menu.start(ctx) @sus_users.error async def sus_users_error(self, ctx, error): await ctx.send(error) class TestersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed = discord.Embed(title = "Testing Users:", color = random.randint(0, 16777215)) embed.add_field(name = "User ID:", value = f"{item}", inline = False) return embed @commands.command(brief = "a command listed all the commands") async def testers(self, ctx): menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page = 1), delete_message_after = True) await menu.start(ctx) @commands.command() async def update_sus(self, ctx): await self.bot.sus_users.commit() await ctx.send("Updated SQL boss.") @update_sus.error async def update_sus_error(self, ctx, error): await ctx.send(error) @commands.command(aliases=["bypass_command"]) async def command_bypass(self, ctx ,user: utils.BetterUserconverter = None, *, command = None): #make sure to swap to autoconverter if it gets added. user = user or ctx.author if command: command_wanted=self.bot.get_command(command) if command_wanted: await ctx.send(f"{command_wanted.name} now accessible for the {user} for one command usage!") self.bot.special_access[user.id]=command_wanted.name if command_wanted is None: await ctx.send("Please specify a valid command.") if command is None: await ctx.send("select a command :(") @commands.command(brief = "resets cooldown for you.",aliases = ["reset_cooldown"]) async def resetcooldown(self, ctx, *, command = None): if not command: return await ctx.send("please specificy a command") command_wanted = self.bot.get_command(command) if not command_wanted: return await ctx.send("please specify a command") if not command_wanted.is_on_cooldown(ctx): return await ctx.send("That doesn't have a cooldown/isn't on a cooldown.") command_wanted.reset_cooldown(ctx) await ctx.send(f"reset cooldown of {command_wanted}") @commands.command(brief = "leaves a guild only use when needed or really wanted. 
Otherwise no thanks.") async def leave_guild(self, ctx, *, guild: typing.Optional[discord.Guild] = None): guild = guild or ctx.guild if guild is None: return await ctx.send("Guild is None can't do anything.") await ctx.send("Bot leaving guild :(") try: await guild.leave() except Exception as e: await ctx.send(f"Somehow an error occured: {e}") traceback.print_exc() @commands.command() async def aioinput_test(self, ctx, *, args = None): args = args or "Test" result=await self.bot.loop.run_in_executor(None, input, (f"{args}:")) await ctx.send(f"Result of the input was {result}") @commands.command(brief="a powerful owner tool to reload local files that aren't reloadable.") async def reload_basic(self, ctx, *, args = None): if args is None:await ctx.send("Can't reload module named None") if args: try: module = importlib.import_module(name=args) except Exception as e: traceback.print_exc() return await ctx.send(e) try: value=importlib.reload(module) except Exception as e: traceback.print_exc() return await ctx.send(e) await ctx.send(f"Sucessfully reloaded {value.__name__} \nMain Package: {value.__package__}") @commands.command(brief="backs up a channel and then sends it into a file or mystbin") async def channel_backup(self, ctx): messages = await ctx.channel.history(limit = None, oldest_first = True).flatten() new_line = "\n" page = "\n".join(f"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {msg.content} {new_line}Attachments : {msg.attachments}" if msg.content else f"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {new_line.join(f'{e.to_dict()}' for e in msg.embeds)} {new_line}Attachments : {msg.attachments}" for msg in messages) mystbin_client = mystbin.Client(session = self.bot.session) paste = await mystbin_client.post(page) await ctx.author.send(content=f"Added text file to mystbin: \n{paste.url}") @channel_backup.error async def channel_backup_error(self, ctx, error): etype = type(error) trace = error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error, trace))) pages = textwrap.wrap(values, width = 1992) menu = ViewMenuPages(utils.ErrorEmbed(pages, per_page = 1),delete_message_after = True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) mystbin_client = mystbin.Client(session=self.bot.session) paste = await mystbin_client.post(values) await ctx.send(f"Traceback: {paste.url}") @commands.command(brief = "adds packages and urls to rtfm DB", aliases=["add_rtfm"]) async def addrtfm(self, ctx, name = None, *, url = None): if not name or not url or not name and not url: return await ctx.send("You need a name and also url.") cur = await self.bot.sus_users.cursor() await cur.execute("INSERT INTO RTFM_DICTIONARY VALUES (?, ?)", (name, url)) await self.bot.sus_users.commit() await cur.close() await ctx.send(f"added {name} and {url} to the rtfm DB") @commands.command(brief = "removes packages from the rtfm DB", aliases = ["remove_rtfm"]) async def removertfm(self, ctx, *, name = None): if name is None: return await ctx.send("You can't remove None") cur = await self.bot.sus_users.cursor() await cur.execute("DELETE FROM RTFM_DICTIONARY WHERE name = ?", (name,)) await self.bot.sus_users.commit() await cur.close() await ctx.send(f"Removed the rfm value {name}.") @commands.command(brief = "a command to save images to imgur(for owner only lol)") async def save_image(self, ctx): if not ctx.message.attachments: return await ctx.send("You need to provide some attachments.") await 
ctx.send("JDJG doesn't take any responbility for what you upload here :eyes: don't upload anything bad okay?") for x in ctx.message.attachments: try: discord.utils._get_mime_type_for_image(await x.read()) except Exception as e: traceback.print_exc() return await ctx.send(e) imgur_client= aioimgur.ImgurClient(os.environ["imgur_id"], os.environ["imgur_secret"]) imgur_url = await imgur_client.upload(await x.read()) await ctx.send(f"{imgur_url['link']}") @commands.command(brief="A command to remove testers") async def remove_tester(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.send("You can't have a non existent user.") if user: cur = await self.bot.sus_users.cursor() await cur.execute("DELETE FROM testers_list WHERE user_id = ?", (user.id,)) await self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers: return await ctx.send(f"{user} isn't in the testers list.") else: self.bot.testers.remove(user.id) await ctx.send(f"Removed tester known as {user}") @commands.command(brief="A command to add testers") async def add_tester(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.send("You can't have a non existent user.") if user: cur = await self.bot.sus_users.cursor() await cur.execute("INSERT INTO testers_list VALUES (?)", (user.id,)) await self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers: self.bot.testers.append(user.id) await ctx.send(f"added tester known as {user}") else: return await ctx.send(f"{user} is in the testers list already!") def tweepy_post(self, post_text = None): consumer_key = os.getenv('tweet_key') consumer_secret = os.getenv('tweet_secret') auth = tweepy.OAuthHandler(consumer_key, consumer_secret) access_token = os.getenv('tweet_access') access_secret = os.getenv('tweet_token') auth.set_access_token(access_token, access_secret) twitter_api = tweepy.API(auth) return twitter_api.update_status(status = post_text) @commands.command(brief = "sends tweet to JDBot Twitter") async def send_tweet(self, ctx, *, args = None): if not args: return await ctx.send("you can't send nothing to twitter.") try: tweet_time = functools.partial(self.tweepy_post, args) post = await self.bot.loop.run_in_executor(None, tweet_time) except Exception as e: traceback.print_exc() return await ctx.send(f"Exception occured at {e}") await ctx.send(f"Url of sent tweet is: https://twitter.com/twitter/statuses/{post.id}") @commands.command(brief = "chunks a guild for the purpose of testing purpose(it's owner only to be used in testing guilds only)") async def chunk_guild(self, ctx): if ctx.guild is None: return await ctx.send("You can't chunk a guild that doesn't exist or a channel that is a DM.") if ctx.guild.chunked: return await ctx.send("No need to chunk this guild, it appears to be chunked") await ctx.guild.chunk(cache = True) await ctx.send("Finished chunking..") @chunk_guild.error async def chunk_guild_error(self, ctx, error): await ctx.send(error) traceback.print_exc() @commands.command(brief = "displays the guild status and user status immediately") async def stats_status(self, ctx): await ctx.send("changing status, check now....") await self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f"{len(self.bot.guilds)} servers | {len(self.bot.users)} users")) @stats_status.error async def stats_status_error(self, ctx, error): await ctx.send(error) @commands.command(brief="a command to give a list of servers(owner 
only)",help="Gives a list of guilds(Bot Owners only) but with join dates updated.") async def servers2(self, ctx): if await self.bot.is_owner(ctx.author): sorted_guilds = sorted(self.bot.guilds, key=lambda guild: guild.me.joined_at) pag = commands.Paginator() for g in sorted_guilds: pag.add_line(f"{discord.utils.format_dt(g.me.joined_at, style = 'd')} {discord.utils.format_dt(g.me.joined_at, style = 'T')} \n[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\n") pages = [page.strip("`") for page in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) if await self.bot.is_owner(ctx.author) is False: await ctx.send("You can't use that it's owner only") def setup(bot): bot.add_cog(Owner(bot))
1.742188
2
src/mushme.py
MuShMe/MuShMe
1
3440
#!/usr/bin/env python # -*- coding: utf-8 -*- from src import app import os import shutil from flask import Flask, render_template, session, request, flash, url_for, redirect from Forms import ContactForm, LoginForm, editForm, ReportForm, CommentForm, searchForm, AddPlaylist from flask.ext.mail import Message, Mail from werkzeug import secure_filename from werkzeug import SharedDataMiddleware from api import API from songs import SONG from playlist import playlist from admin import admin from artist import artist import pymysql import hashlib from flask import g mail = Mail() mail.init_app(app) #For the collector script. app.register_blueprint(API); #For the songs app.register_blueprint(SONG); #For the playlist app.register_blueprint(playlist); #for the admin pages app.register_blueprint(admin); #for the artist pages app.register_blueprint(artist); UPLOAD_FOLDER = "img/ProfilePic/" ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif']) app.config['UPLOAD_FOLDER'] = 'src/static/' + UPLOAD_FOLDER @app.route('/') def index(): session["login"] = False session["signup"] = False session["logged_in"] = False return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=ContactForm(prefix='form2')) #For database connections. @app.before_request def before_request(): g.conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='<PASSWORD>', db='MuShMe', charset='utf8') g.database = g.conn.cursor() @app.teardown_request def teardown_request(exception): g.conn.close() @app.route('/login', methods=['POST']) def login(): session["login"] = True session["signup"] = False if request.method == 'POST': loginform = LoginForm(request.form, prefix='form1') if loginform.validate_on_submit(): check_login = g.database.execute("""SELECT User_id from MuShMe.entries WHERE Email_id="%s" AND Pwdhash="%s" """ % (loginform.email.data, hashlib.sha1(loginform.password.data).hexdigest())) if check_login: userid= g.database.fetchone() g.database.execute("""UPDATE MuShMe.entries SET Last_Login=CURRENT_TIMESTAMP() WHERE User_id="%s" """ % (userid)) g.conn.commit() for uid in userid: session['userid'] = uid g.database.execute("""SELECT Username from MuShMe.entries WHERE User_id="%s" """ % uid ) session['UserName']=g.database.fetchone()[0] g.database.execute("""SELECT Privilege FROM MuShMe.entries WHERE User_id="%s" """ % uid) session['privilege'] = g.database.fetchone()[0] g.database.execute("""SELECT Profile_pic FROM MuShMe.entries WHERE User_id="%s" """ % uid) session['profilepic'] = g.database.fetchone()[0] g.database.execute("""SELECT Name from MuShMe.entries WHERE User_id="%s" """ % uid ) session["Name"]=g.database.fetchone() g.database.execute("""SELECT DOB from MuShMe.entries WHERE User_id="%s" """ % uid ) session["dob"]=str(g.database.fetchone()) session['logged_in'] = True session['logged_in']=True #print uid #print userid return redirect(url_for('userProfile', userid=uid)) else: flash("Incorrect Email-Id or Password") else: flash("Incorrect Email-Id or Password") return render_template('homepage/index.html', form1=loginform, form2=ContactForm(prefix='form2')) else: return redirect(url_for(('index'))) def flash_errors(form): for field, errors in form.errors.items(): for error in errors: flash(u"Error in the %s field - %s" % ( getattr(form, field).label.text, error )) @app.route('/signup', methods=['POST']) def signup(): session["signup"] = True session["login"] = False contactform = ContactForm(request.form, prefix='form2') if contactform.validate_on_submit(): if 
validate(contactform.email.data,contactform.username.data): check_signup = g.database.execute("""INSERT into MuShMe.entries (Username,Email_id,Pwdhash,Name) VALUES ("%s","%s","%s","%s")""" % (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if check_signup: g.conn.commit() g.database.execute("""SELECT User_id from MuShMe.entries WHERE Email_id="%s" AND Pwdhash="%s" """ % (contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest())) user_id = g.database.fetchone() for uid in user_id: session['userid'] = uid g.database.execute("""SELECT Username from MuShMe.entries WHERE User_id="%s" """ % uid ) session['UserName']=g.database.fetchone()[0] g.database.execute("""SELECT Privilege FROM MuShMe.entries WHERE User_id="%s" """ % uid) session['privilege'] = g.database.fetchone()[0] g.database.execute("""SELECT Profile_Pic FROM MuShMe.entries WHERE User_id="%s" """ % uid) session['profilepic'] = g.database.fetchone()[0] session['logged_in'] = True g.database.execute("""SELECT Name from MuShMe.entries WHERE User_id="%s" """ % uid ) session["Name"]=g.database.fetchone() g.database.execute("""SELECT DOB from MuShMe.entries WHERE User_id="%s" """ % uid ) session["dob"]=str(g.database.fetchone()) newPlaylist = session['UserName'] + ' default collection' g.database.execute("""INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES ("%s","%s")""" % (newPlaylist,uid)) g.conn.commit() return redirect(url_for('userProfile',userid=uid)) else: flash("Please enter valid data !") else: flash("Username or Email has been taken") else: flash_errors(contactform) return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=contactform) def validate(email,username): email = g.database.execute(""" SELECT * from MuShMe.entries where Email_id="%s" """ % email) name = g.database.execute(""" SELECT * from MuShMe.entries where Username="%s" """ % username) if email or name: return False else: return True @app.route('/user/<userid>',methods=['GET']) def userProfile(userid): if session['logged_in'] == False: return render_template('error.html'), 404 else: if request.method == 'GET': User=getUserData(userid) return render_template('userprofile/index.html', userid=userid, form4=CommentForm(prefix='form4'), form3=editForm(prefix='form3'), form6=searchForm(prefix='form6'), form5=ReportForm(prefix='form5'),form7=AddPlaylist(prefix='form7'), friend=getFriend(userid), playlist=getPlaylist(userid), User=getUserData(userid), Comments=getComments(userid), songs=getSong(userid), Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid)) def checkFriend(userid,User): friendName =[] g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" """ % (userid)) for user in g.database.fetchall(): data = {} g.database.execute("""SELECT Username, User_id from MuShMe.entries WHERE User_id="%s" """ % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data) for f in friendName: a=g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" and User_id2="%s" """ % (userid,f['friendid'])) b=g.database.execute("""SELECT User_id2 from friends WHERE User_id2="%s" and User_id1="%s" """ % (userid,f['friendid'])) if a or b: return True elif userid == f['friendid']: return True else: return False g.database.execute("""SELECT User_id1 from friends WHERE 
User_id2="%s" """ % userid) for user in g.database.fetchall(): data = {} g.database.execute("""SELECT Username, User_id from MuShMe.entries WHERE User_id="%s" """ % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data) for f in friendName: a=g.database.execute("""SELECT User_id2 from friends WHERE User_id2="%s" and User_id1="%s" """ % (userid,f['friendid'])) b=g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" and User_id2="%s" """ % (userid,f['friendid'])) if a or b: return True elif userid == f['friendid']: return True else: return False def getAllComments(userid): g.database.execute("SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC" % (userid)) commentids = g.database.fetchall() retval = [] for commentid in commentids: g.database.execute("SELECT Comment, User_id FROM comments WHERE Comment_id=%s", (commentid[0])) commentdata = g.database.fetchone() data = {} data['comment'] = commentdata[0] data['userid'] = commentdata[1] data['commentid'] = commentid[0] g.database.execute("SELECT Username FROM entries WHERE User_id=%s", (data['userid'])) data['username'] = g.database.fetchone()[0] retval.append(data) return retval def getComments(userid): g.database.execute("SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC LIMIT 5" % (userid)) commentids = g.database.fetchall() retval = [] for commentid in commentids: g.database.execute("SELECT Comment, User_id FROM comments WHERE Comment_id=%s", (commentid[0])) commentdata = g.database.fetchone() data = {} data['comment'] = commentdata[0] data['userid'] = commentdata[1] data['commentid'] = commentid[0] g.database.execute("SELECT Username FROM entries WHERE User_id=%s", (data['userid'])) data['username'] = g.database.fetchone()[0] retval.append(data) return retval def getFriend(userid): friendName =[] g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" """ % userid) for user in g.database.fetchall(): data = {} g.database.execute("""SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id="%s" """ % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) g.database.execute("""SELECT User_id1 from friends WHERE User_id2="%s" """ % userid) for user in g.database.fetchall(): data = {} g.database.execute("""SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id="%s" """ % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) print friendName return friendName def getPlaylist(userid): playlist = [] g.database.execute("""SELECT Playlist_name,Playlist_id from MuShMe.playlists WHERE User_id="%s" """ % userid) for p in g.database.fetchall(): data = {} data['pname']=p[0] data['pid']=p[1] playlist.append(data) return playlist def getSong(userid): songName = [] g.database.execute("""SELECT Song_id from MuShMe.user_song WHERE User_id=%s LIMIT 5""" % userid) for song in g.database.fetchall(): data = {} g.database.execute("""SELECT Song_title,Song_id,Song_Album from MuShMe.songs WHERE Song_id="%s" """ % song) for a in g.database.fetchall(): data['songname']=a[0] data['songid']=a[1] g.database.execute("SELECT Album_pic FROM albums WHERE Album_id=%s " % (a[2])) g.conn.commit() data['art'] = g.database.fetchone()[0] songName.append(data) return songName def getUserData(userid): User = [] g.database.execute(""" SELECT 
Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from entries where User_id="%s" """ % userid) for a in g.database.fetchall(): data={} data['username']=a[0] data['userid']=a[1] data['profilepic'] = a[2] data['privilege']=a[3] data['email']=a[4] data['name']=a[5] data['dob']=str(a[6]) User.append(data) return User def getAllRecommend(userid): recommend =[] g.database.execute(""" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to="%s" """ % userid) for a in g.database.fetchall(): data={} data['rid']=a[0] data['userfrom'] = a[1] data['userto']=a[2] g.database.execute(""" SELECT Username from entries where User_id='%s' """ % a[1]) data['userfromname'] = g.database.fetchone()[0] check_song = g.database.execute(""" SELECT Song_id from recommend_songs where Recommend_id="%s" """ % a[0]) if check_song: songid = g.database.fetchone()[0] data['song'] = [] g.database.execute(""" SELECT Song_title,Song_Album,Genre,Publisher from songs where Song_id="%s" """ % songid) for song in g.database.fetchall(): d = {} d['title']=song[0] d['album'] = song[1] d['genre'] = song[2] d['publisher'] = song[3] d['songid'] = songid data['song'].append(d) check_playlist = g.database.execute(""" SELECT Playlist_id from recommend_playlists where Recommend_id="%s" """ % a[0]) if check_playlist: playlistid = g.database.fetchone()[0] data['playlist'] = [] g.database.execute(""" SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id="%s" """ % playlistid) for p in g.database.fetchall(): d= {} d['pname']=p[0] d['pid']=p[1] g.database.execute(""" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id="%s" """ % p[2]) for k in g.database.fetchall(): d['username']=k[0] d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRecommend(userid): recommend =[] g.database.execute(""" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to="%s" LIMIT 5 """ % userid) for a in g.database.fetchall(): data={} data['rid']=a[0] data['userfrom'] = a[1] data['userto']=a[2] g.database.execute(""" SELECT Username from entries where User_id='%s' """ % a[1]) data['userfromname'] = g.database.fetchone()[0] print data['userfromname'] check_song = g.database.execute(""" SELECT Song_id from recommend_songs where Recommend_id="%s" """ % a[0]) if check_song: songid = g.database.fetchone()[0] data['song'] = [] g.database.execute(""" SELECT Song_title,Song_Album,Genre,Publisher from songs where Song_id="%s" """ % songid) for song in g.database.fetchall(): d = {} d['title']=song[0] d['album'] = song[1] d['genre'] = song[2] d['publisher'] = song[3] d['songid'] = songid d['songart'] = getSongArt(songid) data['song'].append(d) check_playlist = g.database.execute(""" SELECT Playlist_id from recommend_playlists where Recommend_id="%s" """ % a[0]) if check_playlist: playlistid = g.database.fetchone()[0] data['playlist'] = [] g.database.execute(""" SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id="%s" """ % playlistid) for p in g.database.fetchall(): d= {} d['pname']=p[0] d['pid']=p[1] g.database.execute(""" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id="%s" """ % p[2]) for k in g.database.fetchall(): d['username']=k[0] d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRequest(userid): request =[] g.database.execute(""" SELECT Request_id,Request_from,Request_to,Status from requests where Request_to="%s" """ % userid) for a in 
g.database.fetchall(): data={} data['reqid']=a[0] data['reqfrom'] = a[1] data['reqto']=a[2] data['status']=a[3] data['reqfromuser'] = [] g.database.execute(""" SELECT User_id,Username,Name from entries where User_id='%s' """ % a[1]) for i in g.database.fetchall(): d={} d['userid'] = i[0] d['username'] = i[1] d['name'] = i[2] data['reqfromuser'].append(d) print data request.append(data) return request def getSongArt(songid): g.database.execute("SELECT Song_Album FROM songs WHERE song_id=%s", (songid)) albumname = g.database.fetchone()[0] g.database.execute("SELECT Album_pic FROM albums WHERE Album_id=%s", (albumname)) return g.database.fetchone()[0] @app.route('/user/<userid>/edit',methods=['POST','GET']) def editName(userid): if request.method == 'POST': uid = userid print request.form if request.form['editname'] != '': g.database.execute("""UPDATE MuShMe.entries SET Name=%s WHERE User_id=%s """, ([request.form['editname']], userid)) g.conn.commit() if request.form['birthday_year'] != '0' and request.form['birthday_month'] != '0' and request.form['birthday_day'] != '0': g.database.execute("""UPDATE MuShMe.entries SET DOB="%s-%s-%s" WHERE User_id="%s" """ % (request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS @app.route('/user/<userid>/file', methods=['GET', 'POST']) def upload_file(userid): if request.method == 'POST': file = request.files['file'] if file and allowed_file(file.filename): filename = secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath = UPLOAD_FOLDER + filename session['profilepic'] = filepath g.database.execute("""UPDATE MuShMe.entries SET Profile_pic="%s" WHERE User_id="%s" """ % (filepath, userid)) g.conn.commit() return redirect(url_for('userProfile', userid=userid)) app.add_url_rule('/user/uploads/<filename>', 'uploaded_file',build_only=True) app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER'] }) @app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def comment(rcvrid, senderid): if request.method == 'POST': commentform = CommentForm(request.form, prefix='form4') #print senderid #print rcvrid if commentform.comment.data: query = ("""INSERT INTO MuShMe.comments (comment_type, Comment, User_id) VALUES ("%s","%s","%s") """ % ('U',commentform.comment.data, senderid)) print query g.database.execute(query) g.conn.commit() g.database.execute("""SELECT Comment_id from MuShMe.comments WHERE Comment="%s" """ % (commentform.comment.data)) data = g.database.fetchone()[0] #print data enter_comment = g.database.execute("""INSERT INTO MuShMe.user_comments (Comment_id, User_id) VALUES ("%s","%s")""" % (data,rcvrid)) if enter_comment: g.conn.commit() g.database.execute("""SELECT User_id FROM MuShMe.user_comments WHERE Comment_id="%s" """ % data) #print g.database.fetchone()[0] return redirect(url_for('userProfile', userid=rcvrid)) @app.route('/user/<userid>/<commentid>/report',methods=['POST','GET']) def report(userid,commentid): if request.method == 'POST': reportform = ReportForm(request.form, prefix='form5') print reportform.report.data check_report = g.database.execute("""INSERT INTO MuShMe.complaints (Complain_type, Complain_description, Comment_id,reported_by) VALUES 
("%s","%s","%s","%s") """ % (reportform.report.data, reportform.other.data, commentid, session['userid'] )) if check_report == True: g.conn.commit() return redirect(url_for('userProfile', userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) @app.route('/user/<uidto>.<uidfrom>/request',methods=['POST']) def sendrequest(uidto,uidfrom): if request.method == 'POST': if requestvalidate(uidfrom,uidto): query=(""" INSERT INTO requests (Request_from,Request_to,Status) VALUES ("%s","%s","%s") """ % (uidfrom,uidto,1)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=uidto)) @app.route('/user/<userto>.<userfrom>/accept',methods=['POST']) def acceptrequest(userto,userfrom): if request.method == 'POST': query=(""" UPDATE requests SET Status="%s" WHERE Request_from="%s" and Request_to="%s" """ % (0,userfrom,userto)) g.database.execute(query) g.conn.commit() query = (""" INSERT INTO friends Values ("%s","%s") """ % (userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom): if request.method == 'POST': query=(""" UPDATE requests SET Status="%s" WHERE Request_from="%s" and Request_to="%s" """ % (-1,userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) def requestvalidate(userfrom,userto): check = g.database.execute(""" SELECT Status from requests where Request_to="%s" and Request_from="%s" """ % (userfrom,userto)) if check and g.database.fetchone()[0]=='-1' and userfrom!=userto: return False else: return True @app.route('/search',methods=['POST','GET']) def search(): if request.method == 'POST': searchform = searchForm(prefix='form6') #print 'f' value = searchform.entry.data + '%' search_fname = [] search_song= [] search_friend = [] search_playlist =[] search_artist = [] check_song = g.database.execute("""SELECT Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE Song_title LIKE "%s" """ % ( value )) for a in g.database.fetchall(): data={} data['title']=a[0] data['album']=a[1] data['genre']=a[2] data['publisher']=a[3] data['songid']=a[4] data['art']=getSongArt(a[4]) search_song.append(data) check_artist = g.database.execute("""SELECT Artist_name, Artist_id from MuShMe.artists WHERE Artist_name LIKE "%s" """ % ( value )) for a in g.database.fetchall(): data = {} data['artistname']=a[0] data['artistid']=a[1] search_artist.append(data) check_friend = g.database.execute("""SELECT Username, Name, Profile_pic, User_id from MuShMe.entries WHERE Username LIKE "%s" or Name LIKE "%s" """ % ( value, value )) for a in g.database.fetchall(): data = {} data['username']=a[0] data['name']=a[1] data['profilepic']=a[2] data['userid']=a[3] search_friend.append(data) check_playlist = g.database.execute("""SELECT Playlist_name,User_id, Playlist_id from MuShMe.playlists WHERE Playlist_name LIKE "%s" """ % ( value )) for a in g.database.fetchall(): data = {} data['pname']=a[0] data['pid']=a[2] g.database.execute(""" SELECT Username, Name from MuShMe.entries WHERE User_id="%s" """ % a[1]) for k in g.database.fetchall(): data['username']=k[0] data['uname']=k[1] search_playlist.append(data) length = len(search_playlist) + len(search_song) + len(search_friend) + len(search_artist) + len(search_fname) return render_template('searchpage/search.html', entry=searchform.entry.data,form6=searchForm(prefix='form6'), search_song=search_song, 
search_artist=search_artist,friends=search_friend, search_playlist=search_playlist,length = length) else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) @app.route('/user/<userid>/addplaylist',methods=['POST']) def addplaylist(userid): if request.method=='POST': addplaylistform = AddPlaylist(prefix='form7') g.database.execute("""INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES ("%s","%s")""" % (addplaylistform.add.data,userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) @app.route("/playlist/<userid>/deleteplaylist", methods=["POST"]) def deleteplaylist(userid): playlist = request.form.getlist('playlistselect') for playlistid in playlist: g.database.execute("""DELETE FROM playlists WHERE Playlist_id=%s and User_id=%s """ % (playlistid, userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) #All your profile are belong to us. @app.route('/artist/<artistid>') def artistProfile(artistid): return render_template('artistpage/index.html',form6=searchForm(prefix='form6')) #To handle 404 not found errors @app.errorhandler(404) def page_not_found_error(error): return render_template('error.html'), 404 @app.route('/termsofservices') def tos(): return render_template('tos.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/changepwd') def changepwd(): return render_template('changepwd.html') @app.route('/logout') def logout(): if 'email' not in session: return render_template('error.html') session['logged_in']=False return render_template('login.html') if not app.debug: import logging from logging.handlers import SMTPHandler mail_handler = SMTPHandler('127.0.0.1', '<EMAIL>', app.config['DEFAULT_MAIL_SENDER'], 'YourApplication Failed') mail_handler.setLevel(logging.ERROR) app.logger.addHandler(mail_handler) from logging import FileHandler file_handler = FileHandler('log.txt') file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) from logging import Formatter mail_handler.setFormatter(Formatter(''' Message type: %(levelname)s Location: %(pathname)s:%(lineno)d Module: %(module)s Function: %(funcName)s Time: %(asctime)s Message: %(message)s ''')) if __name__ == """__main__""": # To allow aptana to receive errors, set use_debugger=False app = create_app(config="""config.yaml""") if app.debug: use_debugger = True try: # Disable Flask's debugger if external debugger is requested use_debugger = not(app.config.get('DEBUG_WITH_APTANA')) except: pass app.run(use_debugger=use_debugger, use_reloader=use_debugger, threaded=True, port=8080)
1.429688
1
nelly/parser.py
shawcx/nelly
0
3448
# # (c) 2008-2020 <NAME> # import sys import os import re import logging import nelly from .scanner import Scanner from .program import Program from .types import * class Parser(object): def __init__(self, include_dirs=[]): self.include_dirs = include_dirs + [ os.path.join(nelly.root, 'grammars') ] self.pwd = [] # setup the scanner based on the regular expressions self.scanner = Scanner(os.path.join(nelly.root, 'rules.lex')) # container for the compiled program self.program = Program() self.tokens_stack = [] self.groups_stack = [] self.group_stack = [] self.groups = None self.group = None def Parse(self, grammarFile): grammar = grammarFile.read() self.pwd.append(os.path.dirname(grammarFile.name)) logging.debug('Parsing %s (%d bytes)', grammarFile.name, len(grammar)) self.tokens = self.scanner.Scan(grammar) # keep a reference to the tokens for when included files are parsed self.tokens_stack.append(self.tokens) # iterate over all the tokens while self.tokens: (token,value,line,col) = self.tokens.Next() # handle all the top-level tokens if 'nonterminal' == token: if value.startswith('::'): value = value[2:] self._nonterminal(Types.NONTERMINAL, value) elif 'varterminal' == token: if value.startswith('::'): value = value[2:] self._nonterminal(Types.VARTERMINAL, value) elif 'include' == token: self._include() elif 'start_python_code' == token: if r'<%pre' == value: self.program.preamble.append(self._python_code('pre')) elif r'<%post' == value: self.program.postscript.append(self._python_code('post')) else: raise nelly.error('Please specify pre or post in code section') elif 'start_comment' == token: self._comment() else: raise nelly.error('Unhandled %s %s at %d:%d', token, repr(value), line, col) self.tokens_stack.pop() return self.program def _nonterminal(self, _type, name): # create a new container and add it to the program nonterminal = Nonterminal(_type, name) self.program.nonterminals[name] = nonterminal (token,value,line,col) = self.tokens.Next() # parse any optional arguments for the non-terminal if 'lparen' == token: while True: (token,value,line,col) = self.tokens.Next() if 'rparen' == token: break elif 'comma' == token: continue elif 'option' == token: nonterminal.options.append(value) if value == 'start': self.program.start.append(name) elif 'decorator' == token: nonterminal.decorators.append(value[1:]) else: raise nelly.error('Unknown option: %s %s', token, value) (token,value,line,col) = self.tokens.Next() if 'colon' != token: raise nelly.error('Parse error, missing colon at line %d, column %d', line, col) # parse zero or more expressions until a semicolon is found self._expressions('pipe', 'semicolon', nonterminal) def _expressions(self, delimiter, sentinel, nonterminal): (token,value,line,col) = self.tokens.Peek() expression = Expression((line,col)) while self.tokens: (token,value,line,col) = self.tokens.Next() if sentinel == token: nonterminal.expressions.append(expression) break elif delimiter == token: nonterminal.expressions.append(expression) expression = Expression((line,col)) elif 'lparen' == token: anonterminal = Nonterminal(Types.ANONYMOUS) expression.Statement(Types.ANONYMOUS, anonterminal) self._expressions('pipe', 'rparen', anonterminal) elif token in ['start_single_quote', 'start_double_quote', 'start_triple_quote']: quote = self._quote() expression.Statement(Types.TERMINAL, quote) elif token in ['start_single_bytes', 'start_double_bytes', 'start_triple_bytes']: byte_quote = self._quote() expression.Statement(Types.TERMINAL, byte_quote) elif 'nonterminal' == token: 
expression.Statement(Types.NONTERMINAL, value) elif 'varterminal' == token: expression.Statement(Types.VARTERMINAL, value) elif 'backref' == token: expression.Statement(Types.BACKREFERENCE, value) elif 'function' == token: functerminal = Nonterminal(Types.ANONYMOUS) self._expressions('comma', 'rparen', functerminal) expression.Statement(Types.FUNCTION, value[1:], functerminal) elif 'reference' == token: expression.Statement(Types.REFERENCE, value[1:]) elif 'constant' == token: expression.Statement(Types.TERMINAL, value) elif 'start_python_code' == token: expression.code = self._python_code(nonterminal.name) elif 'lbracket' == token: try: expression.Operation(Types.SLICE, self._slice()) except IndexError: raise nelly.error('Applying slice to nothing at line %d, column %d', line, col) elif 'lcurley' == token: try: expression.Operation(Types.RANGE, self._range()) except IndexError: raise nelly.error('Applying range to nothing at line %d, column %d', line, col) elif 'langle' == token: expression.Weight(self._weight()) elif 'empty' == token: pass else: raise nelly.error('Unhandled token "%s" at line %d, column %d', token, line, col) def _quote(self): # this will always be the quoted value (token,value,line,col) = self.tokens.Next() # this will always be the terminal quote self.tokens.Next() return value # # Slice a string # def _slice(self): front = None back = None start = False (token,value,line,col) = self.tokens.Next() if 'constant' == token: front = value start = True (token,value,line,col) = self.tokens.Next() if 'rbracket' == token: if False == start: raise nelly.error('Empty slice at line %d, column %d', line, col) return (front,front+1) elif 'colon' != token: raise nelly.error('Missing colon at line %d, column %d', line, col) (token,value,line,col) = self.tokens.Next() if 'constant' == token: back = value (token,value,line,col) = self.tokens.Next() elif 'rbracket' != token: raise nelly.error('Missing ] at line %d, column %d', line, col) return (front,back) # # Repeat a range # def _range(self): lower = 0 upper = 0 (token,value,line,col) = self.tokens.Next() if 'constant' != token: raise nelly.error('Missing range at line %d, column %d', line, col) lower = value upper = value (token,value,line,col) = self.tokens.Next() if 'rcurley' == token: return (lower,upper) elif 'comma' != token: raise nelly.error('Missing comma at line %d, column %d', line, col) (token,value,line,col) = self.tokens.Next() if 'constant' == token: upper = value else: raise nelly.error('Missing range at line %d, column %d', line, col) (token,value,line,col) = self.tokens.Next() if 'rcurley' != token: raise nelly.error('Missing } at line %d, column %d', line, col) if lower > upper: lower,upper = upper,lower return (lower,upper) def _weight(self): (token,value,line,col) = self.tokens.Next() if 'constant' != token: raise nelly.error('Missing weight at line %d, column %d', line, col) (token,ignore,line,col) = self.tokens.Next() if 'rangle' != token: raise nelly.error('Missing > at %d, column %d', line, col) return value # # Compile the Python into a code object # def _python_code(self, name): (token,value,line,col) = self.tokens.Next() values = [s for s in value.split('\n') if s.strip()] or [''] # save the whitepsace of the first line ws = re.compile(r'\s*').match(values[0]).group() # check indentation if [s for s in values if not s.startswith(ws)]: raise nelly.error('Bad indentation in code block at line %d, column %d', line, col) # strip and rejoin the code codeblock = '\n'.join(s[len(ws):] for s in values) # eat the 
end_python_code token self.tokens.Next() try: return compile(codeblock, '<'+name+'>', 'exec') except SyntaxError as e: raise nelly.error('%d: %s: %s', e.lineno, e.msg, repr(e.text)) # # Include other BNF files # def _include(self): (token,value,line,col) = self.tokens.Next() # file names are quoted if token not in ['start_single_quote', 'start_double_quote', 'start_triple_quote']: raise nelly.error('quoted file path expected') # get the quoted value path = self._quote() # try opening the file in each include directory, ignore errors content = None for include_dir in self.pwd[-1:] + self.include_dirs: try: fullpath = os.path.join(include_dir, path) content = open(fullpath, 'r') logging.debug('Including file %s', repr(fullpath)) break except: continue # if no file was found, throw an error if None == content: raise nelly.error('Could not load file %s', repr(path)) # ignore empty file if not content: return # compile it inline self.Parse(content) self.pwd.pop() # restore the current tokens self.tokens = self.tokens_stack[-1] # # Multi-line comments # def _comment(self): # consume and disregard the tokens while True: (token,value,line,col) = self.tokens.Next() if 'start_comment' == token: self._comment() if 'end_comment' == token: return
2.203125
2
Plugins/Aspose.Email Java for Python/tests/ProgrammingEmail/ManageAttachments/ManageAttachments.py
aspose-email/Aspose.Email-for-Java
24
3464
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.

#if __name__ == "__main__":
#    print "Hello World"

from ProgrammingEmail import ManageAttachments
import jpype
import os.path

asposeapispath = os.path.join(os.path.abspath("./../../../"), "lib/")
dataDir = os.path.join(os.path.abspath("./"), "data/")

print "You need to put your Aspose.Email for Java APIs .jars in this folder:\n"+asposeapispath
#print dataDir

jpype.startJVM(jpype.getDefaultJVMPath(), "-Djava.ext.dirs=%s" % asposeapispath)

hw = ManageAttachments(dataDir)
hw.main()
1.164063
1
tools/xkeydump.py
treys/crypto-key-derivation
29
3472
#!./venv/bin/python

from lib.mbp32 import XKey
from lib.utils import one_line_from_stdin

xkey = XKey.from_xkey(one_line_from_stdin())
print(xkey)
print("Version:", xkey.version)
print("Depth:", xkey.depth)
print("Parent FP:", xkey.parent_fp.hex())
print("Child number:", xkey.child_number_with_tick())
print("Chain code:", xkey.chain_code.hex())
print("Key:", xkey.key)
if xkey.key.get_private_bytes():
    print("Private bytes:", xkey.key.get_private_bytes().hex())
print("Public bytes:", xkey.key.get_public_bytes().hex())
print("Key ID:", xkey.keyid().hex())
print("XKey:", xkey.to_xkey().decode('ascii'))
1.5
2
scripts/common_lib/build_lib.py
Bhaskers-Blu-Org1/wc-devops-utilities
15
3488
#!/usr/bin/env python3.6 import os import subprocess import json import argparse import zipfile import shutil import requests import datetime import re import operator import unicodedata # global list of error messages to keep track of all error msgs errorMessages = [] """ Collection of Common Functions used by Build Scripts A collection of common functions shared by each individual build scripts. """ def get(url, usr, pwd): """ HTTP/HTTPS GET requests using external Python module requests @param url the url of the REST call @param usr the functional username for the docker registry @param pwd the password for the docker registry functional user @return a JSON response """ headers = { 'Accept': 'application/vnd.docker.distribution.manifest.v1+json', } # TEMP: Remove the suppressed verification once the docker cert location # is figured out and we specify it in REQUESTS_CA_BUNDLE return requests.get(url, auth=(usr, pwd), headers=headers, verify=False) def get_latest_tag(registry_path, usr, pwd): """ Retrieve the latest version of an image based on its tags: vX-YYYYMMDD-HHmm. The latest, by definition, is defined to be the one with the highest version number (vX) and the latest timestamp (YYYYMMDD-HHmm). @param registry_path docker registry path @param usr the functional username for the docker registry @param pwd the password for the docker registry functional user @return the latest image tag """ tag_list_url = registry_path + '/tags/list' request = get(tag_list_url, usr, pwd) tag_list = json.loads(request.text) for tag in tag_list['tags']: if '-' not in tag: continue str_version, str_dash, str_timestamp = tag.partition('-') tag_format="%Y%m%d-%H%M" try: dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format) except ValueError: continue try: latest_version latest_timestamp latest_tag except NameError: latest_version = str_version latest_timestamp = dt_timestamp latest_tag = tag else: if latest_version > str_version: continue elif latest_version < str_version: latest_version = str_version latest_timestamp = dt_timestamp latest_tag = tag else: if latest_timestamp < dt_timestamp: latest_timestamp = dt_timestamp latest_tag = tag return latest_tag def unzip(zip_file, to_dir): """ Generic unzip function for extracting zip files @param zip_file the zip file to be extracted @param to_dir the destination directory to extract the zip file to """ with zipfile.ZipFile(zip_file, "r") as zip_ref: zip_ref.extractall(to_dir) zip_ref.close() def create_dockerfile(dockerfile_parent_dir, docker_url, image_namespace, image_name, image_tag_latest): """ Creates a dockerfile using the correct docker registry URL associated with the datacenter this script is being run on :param str dockerfile_parent_dir: path to the parent directory for the Dockerfile :param str docker_url: the docker registry VIP accessible from the mesos slaves :param str image_namespace: the name of the image :param str image_name: the name of the image :param str image_tag_latest: the latest version tag of the base image :returns: None """ # Form the path for the Dockerfile based on the parent of the caller script dockerfile_path = os.path.join(dockerfile_parent_dir, "Dockerfile") # Create the Dockerfile dockerfile = open(dockerfile_path, "w+") # Format the FROM command dockerfile_from_cmd = "FROM " + docker_url + image_namespace + "/" + image_name + ":" + image_tag_latest # Write the FROM command string to the Dockerfile dockerfile.write(dockerfile_from_cmd) # Close the open file instance dockerfile.close() def 
set_docker_client_timeout():
    """
    Sets the DOCKER_CLIENT_TIMEOUT environment variable to 300
    """
    os.environ['DOCKER_CLIENT_TIMEOUT'] = '300'
    print("The timeout set for docker client: " + os.environ['DOCKER_CLIENT_TIMEOUT'] + " seconds")


# ======================= verify bundle structure ===============================================
def openJSONfile(jsonFile):
    """
    Function to open a JSON file
    @param jsonFile path to the JSON file
    @return the loaded JSON file
    """
    try:
        with open(jsonFile) as json_data_file:
            data = json.load(json_data_file)
    except:
        addToErrorMessages("The specified JSON file is not valid: " + jsonFile)
        raise
    return data


def directoryToJSON(directory):
    """
    Function to convert objects in a given directory into JSON form.
    The parent object is always a dict; it may contain children if type=directory.
    A directory is represented as a list and may contain files and/or directories.
    @param directory directory to convert
    @return JSON representation of a directory
    """
    d = {'name': os.path.basename(directory)}  # the parent object is a dict
    if os.path.isdir(directory):
        d['type'] = "directory"  # a directory may have children
        # the children of a directory form a list of more files/directories
        d['children'] = [directoryToJSON(os.path.join(directory, x)) for x in os.listdir(directory)]
    else:
        d['type'] = "file"
    return d


def verifyBundleStructure(expected, actual, currentPath):
    """
    Function to verify that an uploaded bundle follows the IBM defined structure
    @param expected the JSON representation of the IBM defined structure
    @param actual the JSON representation of the actual structure of the uploaded bundle
    @param currentPath the path currently being checked (used to build paths recursively for error msg)
    @return True if the structure of the uploaded bundle follows the IBM defined structure. False otherwise.
    """
    isMatched = True
    if type(expected) is dict:
        if matches(expected, actual):  # a matching file or directory was found
            if expected['type'] == 'directory':
                currentPath = currentPath + actual['name'] + "/"
                if expected['children'] == "_any":
                    isMatched = isMatched & True  # the contents of this directory can be anything, so do no further checking
                else:
                    isMatched = isMatched & verifyBundleStructure(expected['children'], actual['children'], currentPath)  # do further checking
        else:  # a matching file or directory was not found
            if expected['fail-if-not-found'] == "yes":
                logBundleStructureErrorMessage(expected, currentPath)
                return False
    if type(expected) is list:
        for k in range(0, len(expected)):
            isMatched = isMatched & verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched)
    return isMatched


def logBundleStructureErrorMessage(expected, currentPath):
    """
    Function to add error messages to the global array.
    @param expected the expected element
    @param currentPath the current path that is missing the file or directory
    """
    addToErrorMessages("A " + expected['type'] + " is missing from the path: \"" + currentPath + "\"")
    addToErrorMessages(expected['error-message-if-fails'])
    return


def matches(expectedElement, actualElement):
    """
    Function to check if files/directories match. They must have the same name and must both be the same type.
    @param expectedElement the expected element. May be defined by a regular expression
    @param actualElement the actual element
    """
    ret = False
    if re.fullmatch(expectedElement['name'], actualElement['name']) is not None and expectedElement['type'] == actualElement['type']:
        ret = True
    return ret


def verifyActualContainsExpectedElement(actual, expectedElement, currentPath, isMatched):
    """
    Function to verify that an actual list of objects contains an expected element. Helper method for verifyBundleStructure.
    @param actual list of the actual files and directories in the bundle
    @param expectedElement the expected element to find in the bundle
    @param currentPath the path currently being checked (used to build paths recursively for error msg)
    @param isMatched (only used for recursive calls)
    @return True if the list of actual objects contains the expected element
    """
    # if actual is a dict then verify it and its children
    if type(actual) is dict:
        isMatched = isMatched & verifyBundleStructure(expectedElement, actual, currentPath)
    # if actual is a list then find out whether any entry matches, and if so get the matched position
    elif type(actual) is list:
        matchedPosition = -1
        for i in range(0, len(actual)):
            if matches(expectedElement, actual[i]):
                matchedPosition = i
                break
        if matchedPosition != -1:  # if they match then verify their children too
            isMatched = isMatched & verifyBundleStructure(expectedElement, actual[matchedPosition], currentPath)
        else:  # if they don't match then log the error msg and return false
            if expectedElement['fail-if-not-found'] == "yes":  # log error msg and return false if needed
                isMatched = False
                logBundleStructureErrorMessage(expectedElement, currentPath)
    return isMatched


def addToErrorMessages(errorMessage):
    """
    Function to add an error message to the global list of errorMessages
    @param errorMessage the error message to add
    """
    print(errorMessage)
    global errorMessages
    errorMessages.extend([errorMessage])
    return


def unzipRecursively(zipFileName, directoryToUnzipTo):
    """
    Function to unzip a ZIP file recursively
    @param zipFileName the zip file to be extracted
    @param directoryToUnzipTo the destination directory to extract the zip file to
    """
    if zipFileName.endswith(".zip"):  # check if it's a .zip
        unzip(zipFileName, directoryToUnzipTo)
        os.remove(zipFileName)
        for x in os.listdir(directoryToUnzipTo):
            subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0])
            subfile = os.path.join(directoryToUnzipTo, x)
            unzipRecursively(subfile, subdirectory)
    return


def zipFileIsGood(filePath):
    """
    Function to test whether a ZIP file is good or bad
    @param filePath the zip file to be tested
    @return True if the ZIP file is good. False otherwise.
    """
    ret = True
    try:
        the_zip_file = zipfile.ZipFile(filePath)
        badFile = the_zip_file.testzip()
        if badFile is not None:
            ret = False
        else:
            ret = True
    except:
        ret = False
    return ret


def verifyZipFile(zipDirectory, nameOfBundle):
    """
    Function to verify that an uploaded bundle:
    1) is a valid zip file
    2) follows the IBM defined structure
    @param zipDirectory where the bundle ZIP is located
    @param nameOfBundle name of the bundle ZIP file
    """
    print('Validating bundle structure...')
    bundleIsGood = True
    bundleZip = os.path.join(zipDirectory, nameOfBundle)
    if zipFileIsGood(bundleZip):
        try:
            # copy bundle into new working directory -----------------------------------------------------------
            directoryToUnzipTo = os.path.join(zipDirectory, "temp")
            if not os.path.exists(directoryToUnzipTo):
                os.makedirs(directoryToUnzipTo)
            shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle))

            # unzip the bundle ----------------------------------------------------------------------------------
            unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle),
                             os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0]))

            # verify structure of bundle ------------------------------------------------------------------------
            # check package structure
            expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, "bundle-definition.json"))
            actualBundleStructure = directoryToJSON(directoryToUnzipTo)  # convert the unzipped directory to JSON form
            bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, "")
            if not bundleIsGood:
                addToErrorMessages("The uploaded bundle does not meet the predefined structure. Could not proceed with deployment.")

            # clean up unzipped files and the package structure JSON ---------------------------------------------
            shutil.rmtree(directoryToUnzipTo)
        except:
            addToErrorMessages("Exception occurred while verifying bundle structure. Could not proceed with deployment.")
            bundleIsGood = False
    else:
        bundleIsGood = False
        addToErrorMessages("The uploaded bundle could not be unzipped. Could not proceed with deployment.")

    # output report value: join all the messages together
    print("report=[" + ". ".join(str(x) for x in errorMessages) + "]")
    return bundleIsGood
1.570313
2
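# Illustrative sketch, not part of the original deployment script above: verifyZipFile()
# loads the expected layout from bundle-definition.json and hands it to
# verifyBundleStructure(). The keys below ('name', 'type', 'children',
# 'fail-if-not-found', 'error-message-if-fails', and the special "_any" marker) are
# exactly the ones those functions read; the concrete file names, messages, and paths
# are invented for the example. This is a Python mirror of what such a JSON definition
# could contain.
expected_structure = {
    "name": ".*",                          # compared with re.fullmatch, so any root name matches
    "type": "directory",
    "fail-if-not-found": "yes",
    "error-message-if-fails": "The bundle root directory is missing.",
    "children": [
        {
            "name": "manifest\\.json",     # regex, so the dot is escaped
            "type": "file",
            "fail-if-not-found": "yes",
            "error-message-if-fails": "Every bundle must contain a manifest.json."
        },
        {
            "name": "scripts",
            "type": "directory",
            "fail-if-not-found": "no",
            "error-message-if-fails": "Optional scripts directory is malformed.",
            "children": "_any"             # contents of this directory are not checked further
        }
    ]
}

errorMessages = []                                        # normally defined at module level by the script
actual_structure = directoryToJSON("/tmp/upload/temp")    # hypothetical unzip location
print(verifyBundleStructure(expected_structure, actual_structure, ""), errorMessages)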
json_schema_checker/composed/__init__.py
zorgulle/json_schema_checker
0
3496
from .composed import List
from .composed import IntList
0.296875
0
platypus/tests/test_operators.py
sctiwari/EZFF_ASE
2
3520
# Copyright 2015-2018 <NAME>
#
# This file is part of Platypus, a Python module for designing and using
# evolutionary algorithms (EAs) and multiobjective evolutionary algorithms
# (MOEAs).
#
# Platypus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Platypus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Platypus. If not, see <http://www.gnu.org/licenses/>.
import unittest
from mock import patch
from ..core import Problem, Solution
from ..types import Permutation
from ..operators import Swap


class TestSwap(unittest.TestCase):

    def test_swap10(self):
        problem = Problem(1, 0)
        problem.types[0] = Permutation(range(10))

        solution = Solution(problem)
        solution.variables[0] = list(range(10))

        with patch('random.randrange', side_effect=[2, 4]):
            result = Swap(1.0).mutate(solution)

        self.assertEqual(result.variables[0][2], 4)
        self.assertEqual(result.variables[0][4], 2)
        self.assertEqual(solution.variables[0][2], 2)
        self.assertEqual(solution.variables[0][4], 4)

    def test_swap2a(self):
        problem = Problem(1, 0)
        problem.types[0] = Permutation(range(2))

        solution = Solution(problem)
        solution.variables[0] = list(range(2))

        with patch('random.randrange', side_effect=[0, 1]):
            result = Swap(1.0).mutate(solution)

        self.assertEqual(result.variables[0][0], 1)
        self.assertEqual(result.variables[0][1], 0)

    def test_swap2b(self):
        problem = Problem(1, 0)
        problem.types[0] = Permutation(range(2))

        solution = Solution(problem)
        solution.variables[0] = list(range(2))

        with patch('random.randrange', side_effect=[1, 1, 0]):
            result = Swap(1.0).mutate(solution)

        self.assertEqual(result.variables[0][0], 1)
        self.assertEqual(result.variables[0][1], 0)

    def test_swap1(self):
        problem = Problem(1, 0)
        problem.types[0] = Permutation(range(1))

        solution = Solution(problem)
        solution.variables[0] = list(range(1))

        with patch('random.randrange', side_effect=[0, 0]):
            result = Swap(1.0).mutate(solution)

        self.assertEqual(result.variables[0][0], 0)
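# Illustrative usage sketch, not part of the original test module: the Swap operator
# exercised above can also be applied without patching random.randrange. The imports
# mirror the tests' relative imports, mapped onto the installed "platypus" package;
# the permutation length and mutation probability are arbitrary choices.
from platypus.core import Problem, Solution
from platypus.types import Permutation
from platypus.operators import Swap

problem = Problem(1, 0)                      # one decision variable, zero objectives
problem.types[0] = Permutation(range(10))    # the variable is a permutation of 0..9

parent = Solution(problem)
parent.variables[0] = list(range(10))

# Swap(1.0) mutates with probability 1: it exchanges two randomly chosen positions
# and returns a copy, leaving the parent unchanged (exactly what the tests assert).
child = Swap(1.0).mutate(parent)
print(parent.variables[0])
print(child.variables[0])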
2.015625
2
pysc2/lib/actions.py
javierrcc522/starcraft2_api_machineLear
2
3528
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Define the static list of types and actions for SC2.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import numbers import six from pysc2.lib import point from s2clientprotocol import spatial_pb2 as sc_spatial from s2clientprotocol import ui_pb2 as sc_ui def no_op(action): del action def move_camera(action, minimap): """Move the camera.""" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action, select_point_act, screen): """Select a unit at a point.""" select = action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type = select_point_act def select_rect(action, select_add, screen, screen2): """Select units within a rectangle.""" select = action.action_feature_layer.unit_selection_rect out_rect = select.selection_screen_coord.add() screen_rect = point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add = bool(select_add) def select_idle_worker(action, select_worker): """Select an idle worker.""" action.action_ui.select_idle_worker.type = select_worker def select_army(action, select_add): """Select the entire army.""" action.action_ui.select_army.selection_add = select_add def select_warp_gates(action, select_add): """Select all warp gates.""" action.action_ui.select_warp_gates.selection_add = select_add def select_larva(action): """Select all larva.""" action.action_ui.select_larva.SetInParent() # Adds the empty proto field. 
def select_unit(action, select_unit_act, select_unit_id): """Select a specific unit from the multi-unit selection.""" select = action.action_ui.multi_panel select.type = select_unit_act select.unit_index = select_unit_id def control_group(action, control_group_act, control_group_id): """Act on a control group, selecting, setting, etc.""" select = action.action_ui.control_group select.action = control_group_act select.control_group_index = control_group_id def unload(action, unload_id): """Unload a unit from a transport/bunker/nydus/etc.""" action.action_ui.cargo_panel.unit_index = unload_id def build_queue(action, build_queue_id): """Cancel a unit in the build queue.""" action.action_ui.production_panel.unit_index = build_queue_id def cmd_quick(action, ability_id, queued): """Do a quick command like 'Stop' or 'Stim'.""" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued def cmd_screen(action, ability_id, queued, screen): """Do a command that needs a point on the screen.""" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action, ability_id, queued, minimap): """Do a command that needs a point on the minimap.""" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action, ability_id): """Toggle autocast.""" action.action_ui.toggle_autocast.ability_id = ability_id class ArgumentType(collections.namedtuple( "ArgumentType", ["id", "name", "sizes", "fn"])): """Represents a single argument type. Attributes: id: The argument id. This is unique. name: The name of the argument, also unique. sizes: The max+1 of each of the dimensions this argument takes. fn: The function to convert the list of integers into something more meaningful to be set in the protos to send to the game. """ __slots__ = () def __str__(self): return "%s/%s %s" % (self.id, self.name, list(self.sizes)) @classmethod def enum(cls, options): """Create an ArgumentType where you choose one of a set of known values.""" return cls(-1, "<none>", (len(options),), lambda a: options[a[0]]) @classmethod def scalar(cls, value): """Create an ArgumentType with a single scalar in range(value).""" return cls(-1, "<none>", (value,), lambda a: a[0]) @classmethod def point(cls): # No range because it's unknown at this time. """Create an ArgumentType that is represented by a point.Point.""" return cls(-1, "<none>", (0, 0), lambda a: point.Point(*a).floor()) @classmethod def spec(cls, id_, name, sizes): """Create an ArgumentType to be used in ValidActions.""" return cls(id_, name, sizes, None) class Arguments(collections.namedtuple("Arguments", [ "screen", "minimap", "screen2", "queued", "control_group_act", "control_group_id", "select_point_act", "select_add", "select_unit_act", "select_unit_id", "select_worker", "build_queue_id", "unload_id"])): """The full list of argument types. Take a look at TYPES and FUNCTION_TYPES for more details. Attributes: screen: A point on the screen. minimap: A point on the minimap. screen2: The second point for a rectangle. This is needed so that no function takes the same type twice. queued: Whether the action should be done now or later. control_group_act: What to do with the control group. control_group_id: Which control group to do it with. 
select_point_act: What to do with the unit at the point. select_add: Whether to add the unit to the selection or replace it. select_unit_act: What to do when selecting a unit by id. select_unit_id: Which unit to select by id. select_worker: What to do when selecting a worker. build_queue_id: Which build queue index to target. unload_id: Which unit to target in a transport/nydus/command center. """ ___slots__ = () @classmethod def types(cls, **kwargs): """Create an Arguments of the possible Types.""" named = {name: type_._replace(id=Arguments._fields.index(name), name=name) for name, type_ in six.iteritems(kwargs)} return cls(**named) # The list of known types. TYPES = Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]), # (now vs add to queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False, True]), # (select vs select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500), # Depends on current selection. select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10), # Depends on current build queue. unload_id=ArgumentType.scalar(500), # Depends on the current loaded units. ) # Which argument types do each function need? FUNCTION_TYPES = { no_op: [], move_camera: [TYPES.minimap], select_point: [TYPES.select_point_act, TYPES.screen], select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2], select_unit: [TYPES.select_unit_act, TYPES.select_unit_id], control_group: [TYPES.control_group_act, TYPES.control_group_id], select_idle_worker: [TYPES.select_worker], select_army: [TYPES.select_add], select_warp_gates: [TYPES.select_add], select_larva: [], unload: [TYPES.unload_id], build_queue: [TYPES.build_queue_id], cmd_quick: [TYPES.queued], cmd_screen: [TYPES.queued, TYPES.screen], cmd_minimap: [TYPES.queued, TYPES.minimap], autocast: [], } # Which ones need an ability? ABILITY_FUNCTIONS = {cmd_quick, cmd_screen, cmd_minimap, autocast} # Which ones require a point? POINT_REQUIRED_FUNCS = { False: {cmd_quick, autocast}, True: {cmd_screen, cmd_minimap, autocast}} always = lambda _: True class Function(collections.namedtuple( "Function", ["id", "name", "ability_id", "general_id", "function_type", "args", "avail_fn"])): """Represents a function action. Attributes: id: The function id, which is what the agent will use. name: The name of the function. Should be unique. ability_id: The ability id to pass to sc2. general_id: 0 for normal abilities, and the ability_id of another ability if it can be represented by a more general action. function_type: One of the functions in FUNCTION_TYPES for how to construct the sc2 action proto out of python types. 
args: A list of the types of args passed to function_type. avail_fn: For non-abilities, this function returns whether the function is valid. """ __slots__ = () @classmethod def ui_func(cls, id_, name, function_type, avail_fn=always): """Define a function representing a ui action.""" return cls(id_, name, 0, 0, function_type, FUNCTION_TYPES[function_type], avail_fn) @classmethod def ability(cls, id_, name, function_type, ability_id, general_id=0): """Define a function represented as a game ability.""" assert function_type in ABILITY_FUNCTIONS return cls(id_, name, ability_id, general_id, function_type, FUNCTION_TYPES[function_type], None) @classmethod def spec(cls, id_, name, args): """Create a Function to be used in ValidActions.""" return cls(id_, name, None, None, None, args, None) def __hash__(self): # So it can go in a set(). return self.id def __str__(self): return self.str() def str(self, space=False): """String version. Set space=True to line them all up nicely.""" return "%s/%s (%s)" % (str(self.id).rjust(space and 4), self.name.ljust(space and 50), "; ".join(str(a) for a in self.args)) class Functions(object): """Represents the full set of functions. Can't use namedtuple since python3 has a limit of 255 function arguments, so build something similar. """ def __init__(self, functions): self._func_list = functions self._func_dict = {f.name: f for f in functions} if len(self._func_dict) != len(self._func_list): raise ValueError("Function names must be unique.") def __getattr__(self, name): return self._func_dict[name] def __getitem__(self, key): if isinstance(key, numbers.Number): return self._func_list[key] return self._func_dict[key] def __iter__(self): return iter(self._func_list) def __len__(self): return len(self._func_list) # pylint: disable=line-too-long FUNCTIONS = Functions([ Function.ui_func(0, "no_op", no_op), Function.ui_func(1, "move_camera", move_camera), Function.ui_func(2, "select_point", select_point), Function.ui_func(3, "select_rect", select_rect), Function.ui_func(4, "select_control_group", control_group), Function.ui_func(5, "select_unit", select_unit, lambda obs: obs.ui_data.HasField("multi")), Function.ui_func(6, "select_idle_worker", select_idle_worker, lambda obs: obs.player_common.idle_worker_count > 0), Function.ui_func(7, "select_army", select_army, lambda obs: obs.player_common.army_count > 0), Function.ui_func(8, "select_warp_gates", select_warp_gates, lambda obs: obs.player_common.warp_gate_count > 0), Function.ui_func(9, "select_larva", select_larva, lambda obs: obs.player_common.larva_count > 0), Function.ui_func(10, "unload", unload, lambda obs: obs.ui_data.HasField("cargo")), Function.ui_func(11, "build_queue", build_queue, lambda obs: obs.ui_data.HasField("production")), # Everything below here is generated with gen_actions.py Function.ability(12, "Attack_screen", cmd_screen, 3674), Function.ability(13, "Attack_minimap", cmd_minimap, 3674), Function.ability(14, "Attack_Attack_screen", cmd_screen, 23, 3674), Function.ability(15, "Attack_Attack_minimap", cmd_minimap, 23, 3674), Function.ability(16, "Attack_AttackBuilding_screen", cmd_screen, 2048, 3674), Function.ability(17, "Attack_AttackBuilding_minimap", cmd_minimap, 2048, 3674), Function.ability(18, "Attack_Redirect_screen", cmd_screen, 1682, 3674), Function.ability(19, "Scan_Move_screen", cmd_screen, 19, 3674), Function.ability(20, "Scan_Move_minimap", cmd_minimap, 19, 3674), Function.ability(21, "Behavior_BuildingAttackOff_quick", cmd_quick, 2082), Function.ability(22, 
"Behavior_BuildingAttackOn_quick", cmd_quick, 2081), Function.ability(23, "Behavior_CloakOff_quick", cmd_quick, 3677), Function.ability(24, "Behavior_CloakOff_Banshee_quick", cmd_quick, 393, 3677), Function.ability(25, "Behavior_CloakOff_Ghost_quick", cmd_quick, 383, 3677), Function.ability(26, "Behavior_CloakOn_quick", cmd_quick, 3676), Function.ability(27, "Behavior_CloakOn_Banshee_quick", cmd_quick, 392, 3676), Function.ability(28, "Behavior_CloakOn_Ghost_quick", cmd_quick, 382, 3676), Function.ability(29, "Behavior_GenerateCreepOff_quick", cmd_quick, 1693), Function.ability(30, "Behavior_GenerateCreepOn_quick", cmd_quick, 1692), Function.ability(31, "Behavior_HoldFireOff_quick", cmd_quick, 3689), Function.ability(32, "Behavior_HoldFireOff_Ghost_quick", cmd_quick, 38, 3689), Function.ability(33, "Behavior_HoldFireOff_Lurker_quick", cmd_quick, 2552, 3689), Function.ability(34, "Behavior_HoldFireOn_quick", cmd_quick, 3688), Function.ability(35, "Behavior_HoldFireOn_Ghost_quick", cmd_quick, 36, 3688), Function.ability(36, "Behavior_HoldFireOn_Lurker_quick", cmd_quick, 2550, 3688), Function.ability(37, "Behavior_PulsarBeamOff_quick", cmd_quick, 2376), Function.ability(38, "Behavior_PulsarBeamOn_quick", cmd_quick, 2375), Function.ability(39, "Build_Armory_screen", cmd_screen, 331), Function.ability(40, "Build_Assimilator_screen", cmd_screen, 882), Function.ability(41, "Build_BanelingNest_screen", cmd_screen, 1162), Function.ability(42, "Build_Barracks_screen", cmd_screen, 321), Function.ability(43, "Build_Bunker_screen", cmd_screen, 324), Function.ability(44, "Build_CommandCenter_screen", cmd_screen, 318), Function.ability(45, "Build_CreepTumor_screen", cmd_screen, 3691), Function.ability(46, "Build_CreepTumor_Queen_screen", cmd_screen, 1694, 3691), Function.ability(47, "Build_CreepTumor_Tumor_screen", cmd_screen, 1733, 3691), Function.ability(48, "Build_CyberneticsCore_screen", cmd_screen, 894), Function.ability(49, "Build_DarkShrine_screen", cmd_screen, 891), Function.ability(50, "Build_EngineeringBay_screen", cmd_screen, 322), Function.ability(51, "Build_EvolutionChamber_screen", cmd_screen, 1156), Function.ability(52, "Build_Extractor_screen", cmd_screen, 1154), Function.ability(53, "Build_Factory_screen", cmd_screen, 328), Function.ability(54, "Build_FleetBeacon_screen", cmd_screen, 885), Function.ability(55, "Build_Forge_screen", cmd_screen, 884), Function.ability(56, "Build_FusionCore_screen", cmd_screen, 333), Function.ability(57, "Build_Gateway_screen", cmd_screen, 883), Function.ability(58, "Build_GhostAcademy_screen", cmd_screen, 327), Function.ability(59, "Build_Hatchery_screen", cmd_screen, 1152), Function.ability(60, "Build_HydraliskDen_screen", cmd_screen, 1157), Function.ability(61, "Build_InfestationPit_screen", cmd_screen, 1160), Function.ability(62, "Build_Interceptors_quick", cmd_quick, 1042), Function.ability(63, "Build_Interceptors_autocast", autocast, 1042), Function.ability(64, "Build_MissileTurret_screen", cmd_screen, 323), Function.ability(65, "Build_Nexus_screen", cmd_screen, 880), Function.ability(66, "Build_Nuke_quick", cmd_quick, 710), Function.ability(67, "Build_NydusNetwork_screen", cmd_screen, 1161), Function.ability(68, "Build_NydusWorm_screen", cmd_screen, 1768), Function.ability(69, "Build_PhotonCannon_screen", cmd_screen, 887), Function.ability(70, "Build_Pylon_screen", cmd_screen, 881), Function.ability(71, "Build_Reactor_quick", cmd_quick, 3683), Function.ability(72, "Build_Reactor_screen", cmd_screen, 3683), Function.ability(73, 
"Build_Reactor_Barracks_quick", cmd_quick, 422, 3683), Function.ability(74, "Build_Reactor_Barracks_screen", cmd_screen, 422, 3683), Function.ability(75, "Build_Reactor_Factory_quick", cmd_quick, 455, 3683), Function.ability(76, "Build_Reactor_Factory_screen", cmd_screen, 455, 3683), Function.ability(77, "Build_Reactor_Starport_quick", cmd_quick, 488, 3683), Function.ability(78, "Build_Reactor_Starport_screen", cmd_screen, 488, 3683), Function.ability(79, "Build_Refinery_screen", cmd_screen, 320), Function.ability(80, "Build_RoachWarren_screen", cmd_screen, 1165), Function.ability(81, "Build_RoboticsBay_screen", cmd_screen, 892), Function.ability(82, "Build_RoboticsFacility_screen", cmd_screen, 893), Function.ability(83, "Build_SensorTower_screen", cmd_screen, 326), Function.ability(84, "Build_SpawningPool_screen", cmd_screen, 1155), Function.ability(85, "Build_SpineCrawler_screen", cmd_screen, 1166), Function.ability(86, "Build_Spire_screen", cmd_screen, 1158), Function.ability(87, "Build_SporeCrawler_screen", cmd_screen, 1167), Function.ability(88, "Build_Stargate_screen", cmd_screen, 889), Function.ability(89, "Build_Starport_screen", cmd_screen, 329), Function.ability(90, "Build_StasisTrap_screen", cmd_screen, 2505), Function.ability(91, "Build_SupplyDepot_screen", cmd_screen, 319), Function.ability(92, "Build_TechLab_quick", cmd_quick, 3682), Function.ability(93, "Build_TechLab_screen", cmd_screen, 3682), Function.ability(94, "Build_TechLab_Barracks_quick", cmd_quick, 421, 3682), Function.ability(95, "Build_TechLab_Barracks_screen", cmd_screen, 421, 3682), Function.ability(96, "Build_TechLab_Factory_quick", cmd_quick, 454, 3682), Function.ability(97, "Build_TechLab_Factory_screen", cmd_screen, 454, 3682), Function.ability(98, "Build_TechLab_Starport_quick", cmd_quick, 487, 3682), Function.ability(99, "Build_TechLab_Starport_screen", cmd_screen, 487, 3682), Function.ability(100, "Build_TemplarArchive_screen", cmd_screen, 890), Function.ability(101, "Build_TwilightCouncil_screen", cmd_screen, 886), Function.ability(102, "Build_UltraliskCavern_screen", cmd_screen, 1159), Function.ability(103, "BurrowDown_quick", cmd_quick, 3661), Function.ability(104, "BurrowDown_Baneling_quick", cmd_quick, 1374, 3661), Function.ability(105, "BurrowDown_Drone_quick", cmd_quick, 1378, 3661), Function.ability(106, "BurrowDown_Hydralisk_quick", cmd_quick, 1382, 3661), Function.ability(107, "BurrowDown_Infestor_quick", cmd_quick, 1444, 3661), Function.ability(108, "BurrowDown_InfestorTerran_quick", cmd_quick, 1394, 3661), Function.ability(109, "BurrowDown_Lurker_quick", cmd_quick, 2108, 3661), Function.ability(110, "BurrowDown_Queen_quick", cmd_quick, 1433, 3661), Function.ability(111, "BurrowDown_Ravager_quick", cmd_quick, 2340, 3661), Function.ability(112, "BurrowDown_Roach_quick", cmd_quick, 1386, 3661), Function.ability(113, "BurrowDown_SwarmHost_quick", cmd_quick, 2014, 3661), Function.ability(114, "BurrowDown_Ultralisk_quick", cmd_quick, 1512, 3661), Function.ability(115, "BurrowDown_WidowMine_quick", cmd_quick, 2095, 3661), Function.ability(116, "BurrowDown_Zergling_quick", cmd_quick, 1390, 3661), Function.ability(117, "BurrowUp_quick", cmd_quick, 3662), Function.ability(118, "BurrowUp_autocast", autocast, 3662), Function.ability(119, "BurrowUp_Baneling_quick", cmd_quick, 1376, 3662), Function.ability(120, "BurrowUp_Baneling_autocast", autocast, 1376, 3662), Function.ability(121, "BurrowUp_Drone_quick", cmd_quick, 1380, 3662), Function.ability(122, "BurrowUp_Hydralisk_quick", cmd_quick, 1384, 3662), 
Function.ability(123, "BurrowUp_Hydralisk_autocast", autocast, 1384, 3662), Function.ability(124, "BurrowUp_Infestor_quick", cmd_quick, 1446, 3662), Function.ability(125, "BurrowUp_InfestorTerran_quick", cmd_quick, 1396, 3662), Function.ability(126, "BurrowUp_InfestorTerran_autocast", autocast, 1396, 3662), Function.ability(127, "BurrowUp_Lurker_quick", cmd_quick, 2110, 3662), Function.ability(128, "BurrowUp_Queen_quick", cmd_quick, 1435, 3662), Function.ability(129, "BurrowUp_Queen_autocast", autocast, 1435, 3662), Function.ability(130, "BurrowUp_Ravager_quick", cmd_quick, 2342, 3662), Function.ability(131, "BurrowUp_Ravager_autocast", autocast, 2342, 3662), Function.ability(132, "BurrowUp_Roach_quick", cmd_quick, 1388, 3662), Function.ability(133, "BurrowUp_Roach_autocast", autocast, 1388, 3662), Function.ability(134, "BurrowUp_SwarmHost_quick", cmd_quick, 2016, 3662), Function.ability(135, "BurrowUp_Ultralisk_quick", cmd_quick, 1514, 3662), Function.ability(136, "BurrowUp_Ultralisk_autocast", autocast, 1514, 3662), Function.ability(137, "BurrowUp_WidowMine_quick", cmd_quick, 2097, 3662), Function.ability(138, "BurrowUp_Zergling_quick", cmd_quick, 1392, 3662), Function.ability(139, "BurrowUp_Zergling_autocast", autocast, 1392, 3662), Function.ability(140, "Cancel_quick", cmd_quick, 3659), Function.ability(141, "Cancel_AdeptPhaseShift_quick", cmd_quick, 2594, 3659), Function.ability(142, "Cancel_AdeptShadePhaseShift_quick", cmd_quick, 2596, 3659), Function.ability(143, "Cancel_BarracksAddOn_quick", cmd_quick, 451, 3659), Function.ability(144, "Cancel_BuildInProgress_quick", cmd_quick, 314, 3659), Function.ability(145, "Cancel_CreepTumor_quick", cmd_quick, 1763, 3659), Function.ability(146, "Cancel_FactoryAddOn_quick", cmd_quick, 484, 3659), Function.ability(147, "Cancel_GravitonBeam_quick", cmd_quick, 174, 3659), Function.ability(148, "Cancel_LockOn_quick", cmd_quick, 2354, 3659), Function.ability(149, "Cancel_MorphBroodlord_quick", cmd_quick, 1373, 3659), Function.ability(150, "Cancel_MorphGreaterSpire_quick", cmd_quick, 1221, 3659), Function.ability(151, "Cancel_MorphHive_quick", cmd_quick, 1219, 3659), Function.ability(152, "Cancel_MorphLair_quick", cmd_quick, 1217, 3659), Function.ability(153, "Cancel_MorphLurker_quick", cmd_quick, 2333, 3659), Function.ability(154, "Cancel_MorphLurkerDen_quick", cmd_quick, 2113, 3659), Function.ability(155, "Cancel_MorphMothership_quick", cmd_quick, 1848, 3659), Function.ability(156, "Cancel_MorphOrbital_quick", cmd_quick, 1517, 3659), Function.ability(157, "Cancel_MorphOverlordTransport_quick", cmd_quick, 2709, 3659), Function.ability(158, "Cancel_MorphOverseer_quick", cmd_quick, 1449, 3659), Function.ability(159, "Cancel_MorphPlanetaryFortress_quick", cmd_quick, 1451, 3659), Function.ability(160, "Cancel_MorphRavager_quick", cmd_quick, 2331, 3659), Function.ability(161, "Cancel_MorphThorExplosiveMode_quick", cmd_quick, 2365, 3659), Function.ability(162, "Cancel_NeuralParasite_quick", cmd_quick, 250, 3659), Function.ability(163, "Cancel_Nuke_quick", cmd_quick, 1623, 3659), Function.ability(164, "Cancel_SpineCrawlerRoot_quick", cmd_quick, 1730, 3659), Function.ability(165, "Cancel_SporeCrawlerRoot_quick", cmd_quick, 1732, 3659), Function.ability(166, "Cancel_StarportAddOn_quick", cmd_quick, 517, 3659), Function.ability(167, "Cancel_StasisTrap_quick", cmd_quick, 2535, 3659), Function.ability(168, "Cancel_Last_quick", cmd_quick, 3671), Function.ability(169, "Cancel_HangarQueue5_quick", cmd_quick, 1038, 3671), Function.ability(170, 
"Cancel_Queue1_quick", cmd_quick, 304, 3671), Function.ability(171, "Cancel_Queue5_quick", cmd_quick, 306, 3671), Function.ability(172, "Cancel_QueueAddOn_quick", cmd_quick, 312, 3671), Function.ability(173, "Cancel_QueueCancelToSelection_quick", cmd_quick, 308, 3671), Function.ability(174, "Cancel_QueuePasive_quick", cmd_quick, 1831, 3671), Function.ability(175, "Cancel_QueuePassiveCancelToSelection_quick", cmd_quick, 1833, 3671), Function.ability(176, "Effect_Abduct_screen", cmd_screen, 2067), Function.ability(177, "Effect_AdeptPhaseShift_screen", cmd_screen, 2544), Function.ability(178, "Effect_AutoTurret_screen", cmd_screen, 1764), Function.ability(179, "Effect_BlindingCloud_screen", cmd_screen, 2063), Function.ability(180, "Effect_Blink_screen", cmd_screen, 3687), Function.ability(181, "Effect_Blink_Stalker_screen", cmd_screen, 1442, 3687), Function.ability(182, "Effect_ShadowStride_screen", cmd_screen, 2700, 3687), Function.ability(183, "Effect_CalldownMULE_screen", cmd_screen, 171), Function.ability(184, "Effect_CausticSpray_screen", cmd_screen, 2324), Function.ability(185, "Effect_Charge_screen", cmd_screen, 1819), Function.ability(186, "Effect_Charge_autocast", autocast, 1819), Function.ability(187, "Effect_ChronoBoost_screen", cmd_screen, 261), Function.ability(188, "Effect_Contaminate_screen", cmd_screen, 1825), Function.ability(189, "Effect_CorrosiveBile_screen", cmd_screen, 2338), Function.ability(190, "Effect_EMP_screen", cmd_screen, 1628), Function.ability(191, "Effect_Explode_quick", cmd_quick, 42), Function.ability(192, "Effect_Feedback_screen", cmd_screen, 140), Function.ability(193, "Effect_ForceField_screen", cmd_screen, 1526), Function.ability(194, "Effect_FungalGrowth_screen", cmd_screen, 74), Function.ability(195, "Effect_GhostSnipe_screen", cmd_screen, 2714), Function.ability(196, "Effect_GravitonBeam_screen", cmd_screen, 173), Function.ability(197, "Effect_GuardianShield_quick", cmd_quick, 76), Function.ability(198, "Effect_Heal_screen", cmd_screen, 386), Function.ability(199, "Effect_Heal_autocast", autocast, 386), Function.ability(200, "Effect_HunterSeekerMissile_screen", cmd_screen, 169), Function.ability(201, "Effect_ImmortalBarrier_quick", cmd_quick, 2328), Function.ability(202, "Effect_ImmortalBarrier_autocast", autocast, 2328), Function.ability(203, "Effect_InfestedTerrans_screen", cmd_screen, 247), Function.ability(204, "Effect_InjectLarva_screen", cmd_screen, 251), Function.ability(205, "Effect_KD8Charge_screen", cmd_screen, 2588), Function.ability(206, "Effect_LockOn_screen", cmd_screen, 2350), Function.ability(207, "Effect_LocustSwoop_screen", cmd_screen, 2387), Function.ability(208, "Effect_MassRecall_screen", cmd_screen, 3686), Function.ability(209, "Effect_MassRecall_Mothership_screen", cmd_screen, 2368, 3686), Function.ability(210, "Effect_MassRecall_MothershipCore_screen", cmd_screen, 1974, 3686), Function.ability(211, "Effect_MedivacIgniteAfterburners_quick", cmd_quick, 2116), Function.ability(212, "Effect_NeuralParasite_screen", cmd_screen, 249), Function.ability(213, "Effect_NukeCalldown_screen", cmd_screen, 1622), Function.ability(214, "Effect_OracleRevelation_screen", cmd_screen, 2146), Function.ability(215, "Effect_ParasiticBomb_screen", cmd_screen, 2542), Function.ability(216, "Effect_PhotonOvercharge_screen", cmd_screen, 2162), Function.ability(217, "Effect_PointDefenseDrone_screen", cmd_screen, 144), Function.ability(218, "Effect_PsiStorm_screen", cmd_screen, 1036), Function.ability(219, "Effect_PurificationNova_screen", cmd_screen, 2346), 
Function.ability(220, "Effect_Repair_screen", cmd_screen, 3685), Function.ability(221, "Effect_Repair_autocast", autocast, 3685), Function.ability(222, "Effect_Repair_Mule_screen", cmd_screen, 78, 3685), Function.ability(223, "Effect_Repair_Mule_autocast", autocast, 78, 3685), Function.ability(224, "Effect_Repair_SCV_screen", cmd_screen, 316, 3685), Function.ability(225, "Effect_Repair_SCV_autocast", autocast, 316, 3685), Function.ability(226, "Effect_Salvage_quick", cmd_quick, 32), Function.ability(227, "Effect_Scan_screen", cmd_screen, 399), Function.ability(228, "Effect_SpawnChangeling_quick", cmd_quick, 181), Function.ability(229, "Effect_SpawnLocusts_screen", cmd_screen, 2704), Function.ability(230, "Effect_Spray_screen", cmd_screen, 3684), Function.ability(231, "Effect_Spray_Protoss_screen", cmd_screen, 30, 3684), Function.ability(232, "Effect_Spray_Terran_screen", cmd_screen, 26, 3684), Function.ability(233, "Effect_Spray_Zerg_screen", cmd_screen, 28, 3684), Function.ability(234, "Effect_Stim_quick", cmd_quick, 3675), Function.ability(235, "Effect_Stim_Marauder_quick", cmd_quick, 253, 3675), Function.ability(236, "Effect_Stim_Marauder_Redirect_quick", cmd_quick, 1684, 3675), Function.ability(237, "Effect_Stim_Marine_quick", cmd_quick, 380, 3675), Function.ability(238, "Effect_Stim_Marine_Redirect_quick", cmd_quick, 1683, 3675), Function.ability(239, "Effect_SupplyDrop_screen", cmd_screen, 255), Function.ability(240, "Effect_TacticalJump_screen", cmd_screen, 2358), Function.ability(241, "Effect_TimeWarp_screen", cmd_screen, 2244), Function.ability(242, "Effect_Transfusion_screen", cmd_screen, 1664), Function.ability(243, "Effect_ViperConsume_screen", cmd_screen, 2073), Function.ability(244, "Effect_VoidRayPrismaticAlignment_quick", cmd_quick, 2393), Function.ability(245, "Effect_WidowMineAttack_screen", cmd_screen, 2099), Function.ability(246, "Effect_WidowMineAttack_autocast", autocast, 2099), Function.ability(247, "Effect_YamatoGun_screen", cmd_screen, 401), Function.ability(248, "Hallucination_Adept_quick", cmd_quick, 2391), Function.ability(249, "Hallucination_Archon_quick", cmd_quick, 146), Function.ability(250, "Hallucination_Colossus_quick", cmd_quick, 148), Function.ability(251, "Hallucination_Disruptor_quick", cmd_quick, 2389), Function.ability(252, "Hallucination_HighTemplar_quick", cmd_quick, 150), Function.ability(253, "Hallucination_Immortal_quick", cmd_quick, 152), Function.ability(254, "Hallucination_Oracle_quick", cmd_quick, 2114), Function.ability(255, "Hallucination_Phoenix_quick", cmd_quick, 154), Function.ability(256, "Hallucination_Probe_quick", cmd_quick, 156), Function.ability(257, "Hallucination_Stalker_quick", cmd_quick, 158), Function.ability(258, "Hallucination_VoidRay_quick", cmd_quick, 160), Function.ability(259, "Hallucination_WarpPrism_quick", cmd_quick, 162), Function.ability(260, "Hallucination_Zealot_quick", cmd_quick, 164), Function.ability(261, "Halt_quick", cmd_quick, 3660), Function.ability(262, "Halt_Building_quick", cmd_quick, 315, 3660), Function.ability(263, "Halt_TerranBuild_quick", cmd_quick, 348, 3660), Function.ability(264, "Harvest_Gather_screen", cmd_screen, 3666), Function.ability(265, "Harvest_Gather_Drone_screen", cmd_screen, 1183, 3666), Function.ability(266, "Harvest_Gather_Mule_screen", cmd_screen, 166, 3666), Function.ability(267, "Harvest_Gather_Probe_screen", cmd_screen, 298, 3666), Function.ability(268, "Harvest_Gather_SCV_screen", cmd_screen, 295, 3666), Function.ability(269, "Harvest_Return_quick", cmd_quick, 3667), 
Function.ability(270, "Harvest_Return_Drone_quick", cmd_quick, 1184, 3667), Function.ability(271, "Harvest_Return_Mule_quick", cmd_quick, 167, 3667), Function.ability(272, "Harvest_Return_Probe_quick", cmd_quick, 299, 3667), Function.ability(273, "Harvest_Return_SCV_quick", cmd_quick, 296, 3667), Function.ability(274, "HoldPosition_quick", cmd_quick, 18), Function.ability(275, "Land_screen", cmd_screen, 3678), Function.ability(276, "Land_Barracks_screen", cmd_screen, 554, 3678), Function.ability(277, "Land_CommandCenter_screen", cmd_screen, 419, 3678), Function.ability(278, "Land_Factory_screen", cmd_screen, 520, 3678), Function.ability(279, "Land_OrbitalCommand_screen", cmd_screen, 1524, 3678), Function.ability(280, "Land_Starport_screen", cmd_screen, 522, 3678), Function.ability(281, "Lift_quick", cmd_quick, 3679), Function.ability(282, "Lift_Barracks_quick", cmd_quick, 452, 3679), Function.ability(283, "Lift_CommandCenter_quick", cmd_quick, 417, 3679), Function.ability(284, "Lift_Factory_quick", cmd_quick, 485, 3679), Function.ability(285, "Lift_OrbitalCommand_quick", cmd_quick, 1522, 3679), Function.ability(286, "Lift_Starport_quick", cmd_quick, 518, 3679), Function.ability(287, "Load_screen", cmd_screen, 3668), Function.ability(288, "Load_Bunker_screen", cmd_screen, 407, 3668), Function.ability(289, "Load_Medivac_screen", cmd_screen, 394, 3668), Function.ability(290, "Load_NydusNetwork_screen", cmd_screen, 1437, 3668), Function.ability(291, "Load_NydusWorm_screen", cmd_screen, 2370, 3668), Function.ability(292, "Load_Overlord_screen", cmd_screen, 1406, 3668), Function.ability(293, "Load_WarpPrism_screen", cmd_screen, 911, 3668), Function.ability(294, "LoadAll_quick", cmd_quick, 3663), Function.ability(295, "LoadAll_CommandCenter_quick", cmd_quick, 416, 3663), Function.ability(296, "Morph_Archon_quick", cmd_quick, 1766), Function.ability(297, "Morph_BroodLord_quick", cmd_quick, 1372), Function.ability(298, "Morph_Gateway_quick", cmd_quick, 1520), Function.ability(299, "Morph_GreaterSpire_quick", cmd_quick, 1220), Function.ability(300, "Morph_Hellbat_quick", cmd_quick, 1998), Function.ability(301, "Morph_Hellion_quick", cmd_quick, 1978), Function.ability(302, "Morph_Hive_quick", cmd_quick, 1218), Function.ability(303, "Morph_Lair_quick", cmd_quick, 1216), Function.ability(304, "Morph_LiberatorAAMode_quick", cmd_quick, 2560), Function.ability(305, "Morph_LiberatorAGMode_screen", cmd_screen, 2558), Function.ability(306, "Morph_Lurker_quick", cmd_quick, 2332), Function.ability(307, "Morph_LurkerDen_quick", cmd_quick, 2112), Function.ability(308, "Morph_Mothership_quick", cmd_quick, 1847), Function.ability(309, "Morph_OrbitalCommand_quick", cmd_quick, 1516), Function.ability(310, "Morph_OverlordTransport_quick", cmd_quick, 2708), Function.ability(311, "Morph_Overseer_quick", cmd_quick, 1448), Function.ability(312, "Morph_PlanetaryFortress_quick", cmd_quick, 1450), Function.ability(313, "Morph_Ravager_quick", cmd_quick, 2330), Function.ability(314, "Morph_Root_screen", cmd_screen, 3680), Function.ability(315, "Morph_SpineCrawlerRoot_screen", cmd_screen, 1729, 3680), Function.ability(316, "Morph_SporeCrawlerRoot_screen", cmd_screen, 1731, 3680), Function.ability(317, "Morph_SiegeMode_quick", cmd_quick, 388), Function.ability(318, "Morph_SupplyDepot_Lower_quick", cmd_quick, 556), Function.ability(319, "Morph_SupplyDepot_Raise_quick", cmd_quick, 558), Function.ability(320, "Morph_ThorExplosiveMode_quick", cmd_quick, 2364), Function.ability(321, "Morph_ThorHighImpactMode_quick", cmd_quick, 
2362), Function.ability(322, "Morph_Unsiege_quick", cmd_quick, 390), Function.ability(323, "Morph_Uproot_quick", cmd_quick, 3681), Function.ability(324, "Morph_SpineCrawlerUproot_quick", cmd_quick, 1725, 3681), Function.ability(325, "Morph_SporeCrawlerUproot_quick", cmd_quick, 1727, 3681), Function.ability(326, "Morph_VikingAssaultMode_quick", cmd_quick, 403), Function.ability(327, "Morph_VikingFighterMode_quick", cmd_quick, 405), Function.ability(328, "Morph_WarpGate_quick", cmd_quick, 1518), Function.ability(329, "Morph_WarpPrismPhasingMode_quick", cmd_quick, 1528), Function.ability(330, "Morph_WarpPrismTransportMode_quick", cmd_quick, 1530), Function.ability(331, "Move_screen", cmd_screen, 16), Function.ability(332, "Move_minimap", cmd_minimap, 16), Function.ability(333, "Patrol_screen", cmd_screen, 17), Function.ability(334, "Patrol_minimap", cmd_minimap, 17), Function.ability(335, "Rally_Units_screen", cmd_screen, 3673), Function.ability(336, "Rally_Units_minimap", cmd_minimap, 3673), Function.ability(337, "Rally_Building_screen", cmd_screen, 195, 3673), Function.ability(338, "Rally_Building_minimap", cmd_minimap, 195, 3673), Function.ability(339, "Rally_Hatchery_Units_screen", cmd_screen, 212, 3673), Function.ability(340, "Rally_Hatchery_Units_minimap", cmd_minimap, 212, 3673), Function.ability(341, "Rally_Morphing_Unit_screen", cmd_screen, 199, 3673), Function.ability(342, "Rally_Morphing_Unit_minimap", cmd_minimap, 199, 3673), Function.ability(343, "Rally_Workers_screen", cmd_screen, 3690), Function.ability(344, "Rally_Workers_minimap", cmd_minimap, 3690), Function.ability(345, "Rally_CommandCenter_screen", cmd_screen, 203, 3690), Function.ability(346, "Rally_CommandCenter_minimap", cmd_minimap, 203, 3690), Function.ability(347, "Rally_Hatchery_Workers_screen", cmd_screen, 211, 3690), Function.ability(348, "Rally_Hatchery_Workers_minimap", cmd_minimap, 211, 3690), Function.ability(349, "Rally_Nexus_screen", cmd_screen, 207, 3690), Function.ability(350, "Rally_Nexus_minimap", cmd_minimap, 207, 3690), Function.ability(351, "Research_AdeptResonatingGlaives_quick", cmd_quick, 1594), Function.ability(352, "Research_AdvancedBallistics_quick", cmd_quick, 805), Function.ability(353, "Research_BansheeCloakingField_quick", cmd_quick, 790), Function.ability(354, "Research_BansheeHyperflightRotors_quick", cmd_quick, 799), Function.ability(355, "Research_BattlecruiserWeaponRefit_quick", cmd_quick, 1532), Function.ability(356, "Research_Blink_quick", cmd_quick, 1593), Function.ability(357, "Research_Burrow_quick", cmd_quick, 1225), Function.ability(358, "Research_CentrifugalHooks_quick", cmd_quick, 1482), Function.ability(359, "Research_Charge_quick", cmd_quick, 1592), Function.ability(360, "Research_ChitinousPlating_quick", cmd_quick, 265), Function.ability(361, "Research_CombatShield_quick", cmd_quick, 731), Function.ability(362, "Research_ConcussiveShells_quick", cmd_quick, 732), Function.ability(363, "Research_DrillingClaws_quick", cmd_quick, 764), Function.ability(364, "Research_ExtendedThermalLance_quick", cmd_quick, 1097), Function.ability(365, "Research_GlialRegeneration_quick", cmd_quick, 216), Function.ability(366, "Research_GraviticBooster_quick", cmd_quick, 1093), Function.ability(367, "Research_GraviticDrive_quick", cmd_quick, 1094), Function.ability(368, "Research_GroovedSpines_quick", cmd_quick, 1282), Function.ability(369, "Research_HiSecAutoTracking_quick", cmd_quick, 650), Function.ability(370, "Research_HighCapacityFuelTanks_quick", cmd_quick, 804), Function.ability(371, 
"Research_InfernalPreigniter_quick", cmd_quick, 761), Function.ability(372, "Research_InterceptorGravitonCatapult_quick", cmd_quick, 44), Function.ability(373, "Research_MagFieldLaunchers_quick", cmd_quick, 766), Function.ability(374, "Research_MuscularAugments_quick", cmd_quick, 1283), Function.ability(375, "Research_NeosteelFrame_quick", cmd_quick, 655), Function.ability(376, "Research_NeuralParasite_quick", cmd_quick, 1455), Function.ability(377, "Research_PathogenGlands_quick", cmd_quick, 1454), Function.ability(378, "Research_PersonalCloaking_quick", cmd_quick, 820), Function.ability(379, "Research_PhoenixAnionPulseCrystals_quick", cmd_quick, 46), Function.ability(380, "Research_PneumatizedCarapace_quick", cmd_quick, 1223), Function.ability(381, "Research_ProtossAirArmor_quick", cmd_quick, 3692), Function.ability(382, "Research_ProtossAirArmorLevel1_quick", cmd_quick, 1565, 3692), Function.ability(383, "Research_ProtossAirArmorLevel2_quick", cmd_quick, 1566, 3692), Function.ability(384, "Research_ProtossAirArmorLevel3_quick", cmd_quick, 1567, 3692), Function.ability(385, "Research_ProtossAirWeapons_quick", cmd_quick, 3693), Function.ability(386, "Research_ProtossAirWeaponsLevel1_quick", cmd_quick, 1562, 3693), Function.ability(387, "Research_ProtossAirWeaponsLevel2_quick", cmd_quick, 1563, 3693), Function.ability(388, "Research_ProtossAirWeaponsLevel3_quick", cmd_quick, 1564, 3693), Function.ability(389, "Research_ProtossGroundArmor_quick", cmd_quick, 3694), Function.ability(390, "Research_ProtossGroundArmorLevel1_quick", cmd_quick, 1065, 3694), Function.ability(391, "Research_ProtossGroundArmorLevel2_quick", cmd_quick, 1066, 3694), Function.ability(392, "Research_ProtossGroundArmorLevel3_quick", cmd_quick, 1067, 3694), Function.ability(393, "Research_ProtossGroundWeapons_quick", cmd_quick, 3695), Function.ability(394, "Research_ProtossGroundWeaponsLevel1_quick", cmd_quick, 1062, 3695), Function.ability(395, "Research_ProtossGroundWeaponsLevel2_quick", cmd_quick, 1063, 3695), Function.ability(396, "Research_ProtossGroundWeaponsLevel3_quick", cmd_quick, 1064, 3695), Function.ability(397, "Research_ProtossShields_quick", cmd_quick, 3696), Function.ability(398, "Research_ProtossShieldsLevel1_quick", cmd_quick, 1068, 3696), Function.ability(399, "Research_ProtossShieldsLevel2_quick", cmd_quick, 1069, 3696), Function.ability(400, "Research_ProtossShieldsLevel3_quick", cmd_quick, 1070, 3696), Function.ability(401, "Research_PsiStorm_quick", cmd_quick, 1126), Function.ability(402, "Research_RavenCorvidReactor_quick", cmd_quick, 793), Function.ability(403, "Research_RavenRecalibratedExplosives_quick", cmd_quick, 803), Function.ability(404, "Research_ShadowStrike_quick", cmd_quick, 2720), Function.ability(405, "Research_Stimpack_quick", cmd_quick, 730), Function.ability(406, "Research_TerranInfantryArmor_quick", cmd_quick, 3697), Function.ability(407, "Research_TerranInfantryArmorLevel1_quick", cmd_quick, 656, 3697), Function.ability(408, "Research_TerranInfantryArmorLevel2_quick", cmd_quick, 657, 3697), Function.ability(409, "Research_TerranInfantryArmorLevel3_quick", cmd_quick, 658, 3697), Function.ability(410, "Research_TerranInfantryWeapons_quick", cmd_quick, 3698), Function.ability(411, "Research_TerranInfantryWeaponsLevel1_quick", cmd_quick, 652, 3698), Function.ability(412, "Research_TerranInfantryWeaponsLevel2_quick", cmd_quick, 653, 3698), Function.ability(413, "Research_TerranInfantryWeaponsLevel3_quick", cmd_quick, 654, 3698), Function.ability(414, 
"Research_TerranShipWeapons_quick", cmd_quick, 3699), Function.ability(415, "Research_TerranShipWeaponsLevel1_quick", cmd_quick, 861, 3699), Function.ability(416, "Research_TerranShipWeaponsLevel2_quick", cmd_quick, 862, 3699), Function.ability(417, "Research_TerranShipWeaponsLevel3_quick", cmd_quick, 863, 3699), Function.ability(418, "Research_TerranStructureArmorUpgrade_quick", cmd_quick, 651), Function.ability(419, "Research_TerranVehicleAndShipPlating_quick", cmd_quick, 3700), Function.ability(420, "Research_TerranVehicleAndShipPlatingLevel1_quick", cmd_quick, 864, 3700), Function.ability(421, "Research_TerranVehicleAndShipPlatingLevel2_quick", cmd_quick, 865, 3700), Function.ability(422, "Research_TerranVehicleAndShipPlatingLevel3_quick", cmd_quick, 866, 3700), Function.ability(423, "Research_TerranVehicleWeapons_quick", cmd_quick, 3701), Function.ability(424, "Research_TerranVehicleWeaponsLevel1_quick", cmd_quick, 855, 3701), Function.ability(425, "Research_TerranVehicleWeaponsLevel2_quick", cmd_quick, 856, 3701), Function.ability(426, "Research_TerranVehicleWeaponsLevel3_quick", cmd_quick, 857, 3701), Function.ability(427, "Research_TunnelingClaws_quick", cmd_quick, 217), Function.ability(428, "Research_WarpGate_quick", cmd_quick, 1568), Function.ability(429, "Research_ZergFlyerArmor_quick", cmd_quick, 3702), Function.ability(430, "Research_ZergFlyerArmorLevel1_quick", cmd_quick, 1315, 3702), Function.ability(431, "Research_ZergFlyerArmorLevel2_quick", cmd_quick, 1316, 3702), Function.ability(432, "Research_ZergFlyerArmorLevel3_quick", cmd_quick, 1317, 3702), Function.ability(433, "Research_ZergFlyerAttack_quick", cmd_quick, 3703), Function.ability(434, "Research_ZergFlyerAttackLevel1_quick", cmd_quick, 1312, 3703), Function.ability(435, "Research_ZergFlyerAttackLevel2_quick", cmd_quick, 1313, 3703), Function.ability(436, "Research_ZergFlyerAttackLevel3_quick", cmd_quick, 1314, 3703), Function.ability(437, "Research_ZergGroundArmor_quick", cmd_quick, 3704), Function.ability(438, "Research_ZergGroundArmorLevel1_quick", cmd_quick, 1189, 3704), Function.ability(439, "Research_ZergGroundArmorLevel2_quick", cmd_quick, 1190, 3704), Function.ability(440, "Research_ZergGroundArmorLevel3_quick", cmd_quick, 1191, 3704), Function.ability(441, "Research_ZergMeleeWeapons_quick", cmd_quick, 3705), Function.ability(442, "Research_ZergMeleeWeaponsLevel1_quick", cmd_quick, 1186, 3705), Function.ability(443, "Research_ZergMeleeWeaponsLevel2_quick", cmd_quick, 1187, 3705), Function.ability(444, "Research_ZergMeleeWeaponsLevel3_quick", cmd_quick, 1188, 3705), Function.ability(445, "Research_ZergMissileWeapons_quick", cmd_quick, 3706), Function.ability(446, "Research_ZergMissileWeaponsLevel1_quick", cmd_quick, 1192, 3706), Function.ability(447, "Research_ZergMissileWeaponsLevel2_quick", cmd_quick, 1193, 3706), Function.ability(448, "Research_ZergMissileWeaponsLevel3_quick", cmd_quick, 1194, 3706), Function.ability(449, "Research_ZerglingAdrenalGlands_quick", cmd_quick, 1252), Function.ability(450, "Research_ZerglingMetabolicBoost_quick", cmd_quick, 1253), Function.ability(451, "Smart_screen", cmd_screen, 1), Function.ability(452, "Smart_minimap", cmd_minimap, 1), Function.ability(453, "Stop_quick", cmd_quick, 3665), Function.ability(454, "Stop_Building_quick", cmd_quick, 2057, 3665), Function.ability(455, "Stop_Redirect_quick", cmd_quick, 1691, 3665), Function.ability(456, "Stop_Stop_quick", cmd_quick, 4, 3665), Function.ability(457, "Train_Adept_quick", cmd_quick, 922), Function.ability(458, 
"Train_Baneling_quick", cmd_quick, 80), Function.ability(459, "Train_Banshee_quick", cmd_quick, 621), Function.ability(460, "Train_Battlecruiser_quick", cmd_quick, 623), Function.ability(461, "Train_Carrier_quick", cmd_quick, 948), Function.ability(462, "Train_Colossus_quick", cmd_quick, 978), Function.ability(463, "Train_Corruptor_quick", cmd_quick, 1353), Function.ability(464, "Train_Cyclone_quick", cmd_quick, 597), Function.ability(465, "Train_DarkTemplar_quick", cmd_quick, 920), Function.ability(466, "Train_Disruptor_quick", cmd_quick, 994), Function.ability(467, "Train_Drone_quick", cmd_quick, 1342), Function.ability(468, "Train_Ghost_quick", cmd_quick, 562), Function.ability(469, "Train_Hellbat_quick", cmd_quick, 596), Function.ability(470, "Train_Hellion_quick", cmd_quick, 595), Function.ability(471, "Train_HighTemplar_quick", cmd_quick, 919), Function.ability(472, "Train_Hydralisk_quick", cmd_quick, 1345), Function.ability(473, "Train_Immortal_quick", cmd_quick, 979), Function.ability(474, "Train_Infestor_quick", cmd_quick, 1352), Function.ability(475, "Train_Liberator_quick", cmd_quick, 626), Function.ability(476, "Train_Marauder_quick", cmd_quick, 563), Function.ability(477, "Train_Marine_quick", cmd_quick, 560), Function.ability(478, "Train_Medivac_quick", cmd_quick, 620), Function.ability(479, "Train_MothershipCore_quick", cmd_quick, 1853), Function.ability(480, "Train_Mutalisk_quick", cmd_quick, 1346), Function.ability(481, "Train_Observer_quick", cmd_quick, 977), Function.ability(482, "Train_Oracle_quick", cmd_quick, 954), Function.ability(483, "Train_Overlord_quick", cmd_quick, 1344), Function.ability(484, "Train_Phoenix_quick", cmd_quick, 946), Function.ability(485, "Train_Probe_quick", cmd_quick, 1006), Function.ability(486, "Train_Queen_quick", cmd_quick, 1632), Function.ability(487, "Train_Raven_quick", cmd_quick, 622), Function.ability(488, "Train_Reaper_quick", cmd_quick, 561), Function.ability(489, "Train_Roach_quick", cmd_quick, 1351), Function.ability(490, "Train_SCV_quick", cmd_quick, 524), Function.ability(491, "Train_Sentry_quick", cmd_quick, 921), Function.ability(492, "Train_SiegeTank_quick", cmd_quick, 591), Function.ability(493, "Train_Stalker_quick", cmd_quick, 917), Function.ability(494, "Train_SwarmHost_quick", cmd_quick, 1356), Function.ability(495, "Train_Tempest_quick", cmd_quick, 955), Function.ability(496, "Train_Thor_quick", cmd_quick, 594), Function.ability(497, "Train_Ultralisk_quick", cmd_quick, 1348), Function.ability(498, "Train_VikingFighter_quick", cmd_quick, 624), Function.ability(499, "Train_Viper_quick", cmd_quick, 1354), Function.ability(500, "Train_VoidRay_quick", cmd_quick, 950), Function.ability(501, "Train_WarpPrism_quick", cmd_quick, 976), Function.ability(502, "Train_WidowMine_quick", cmd_quick, 614), Function.ability(503, "Train_Zealot_quick", cmd_quick, 916), Function.ability(504, "Train_Zergling_quick", cmd_quick, 1343), Function.ability(505, "TrainWarp_Adept_screen", cmd_screen, 1419), Function.ability(506, "TrainWarp_DarkTemplar_screen", cmd_screen, 1417), Function.ability(507, "TrainWarp_HighTemplar_screen", cmd_screen, 1416), Function.ability(508, "TrainWarp_Sentry_screen", cmd_screen, 1418), Function.ability(509, "TrainWarp_Stalker_screen", cmd_screen, 1414), Function.ability(510, "TrainWarp_Zealot_screen", cmd_screen, 1413), Function.ability(511, "UnloadAll_quick", cmd_quick, 3664), Function.ability(512, "UnloadAll_Bunker_quick", cmd_quick, 408, 3664), Function.ability(513, "UnloadAll_CommandCenter_quick", cmd_quick, 413, 
3664), Function.ability(514, "UnloadAll_NydasNetwork_quick", cmd_quick, 1438, 3664), Function.ability(515, "UnloadAll_NydusWorm_quick", cmd_quick, 2371, 3664), Function.ability(516, "UnloadAllAt_screen", cmd_screen, 3669), Function.ability(517, "UnloadAllAt_minimap", cmd_minimap, 3669), Function.ability(518, "UnloadAllAt_Medivac_screen", cmd_screen, 396, 3669), Function.ability(519, "UnloadAllAt_Medivac_minimap", cmd_minimap, 396, 3669), Function.ability(520, "UnloadAllAt_Overlord_screen", cmd_screen, 1408, 3669), Function.ability(521, "UnloadAllAt_Overlord_minimap", cmd_minimap, 1408, 3669), Function.ability(522, "UnloadAllAt_WarpPrism_screen", cmd_screen, 913, 3669), Function.ability(523, "UnloadAllAt_WarpPrism_minimap", cmd_minimap, 913, 3669), ]) # pylint: enable=line-too-long # Some indexes to support features.py and action conversion. ABILITY_IDS = collections.defaultdict(set) # {ability_id: {funcs}} for func in FUNCTIONS: if func.ability_id >= 0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS = {k: frozenset(v) for k, v in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE = {f.id: f for f in FUNCTIONS if f.avail_fn} class FunctionCall(collections.namedtuple( "FunctionCall", ["function", "arguments"])): """Represents a function call action. Attributes: function: Store the function id, eg 2 for select_point. arguments: The list of arguments for that function, each being a list of ints. For select_point this could be: [[0], [23, 38]]. """ __slots__ = () @classmethod def all_arguments(cls, function, arguments): """Helper function for creating `FunctionCall`s with `Arguments`. Args: function: The value to store for the action function. arguments: The values to store for the arguments of the action. Can either be an `Arguments` object, a `dict`, or an iterable. If a `dict` or an iterable is provided, the values will be unpacked into an `Arguments` object. Returns: A new `FunctionCall` instance. """ if isinstance(arguments, dict): arguments = Arguments(**arguments) elif not isinstance(arguments, Arguments): arguments = Arguments(*arguments) return cls(function, arguments) class ValidActions(collections.namedtuple( "ValidActions", ["types", "functions"])): """The set of types and functions that are valid for an agent to use. Attributes: types: A namedtuple of the types that the functions require. Unlike TYPES above, this includes the sizes for screen and minimap. functions: A namedtuple of all the functions. """ __slots__ = ()
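# Illustrative sketch, not part of the original module: how a consumer typically
# combines the FUNCTIONS table with FunctionCall. The chosen functions and
# coordinates are arbitrary; the [[0], [23, 38]] shape follows the select_point
# example given in the FunctionCall docstring above.
from pysc2.lib import actions

# "Plain select" (select_point_act = 0) at screen pixel (23, 38).
select = actions.FunctionCall(actions.FUNCTIONS.select_point.id, [[0], [23, 38]])

# move_camera takes a single minimap point.
camera = actions.FunctionCall(actions.FUNCTIONS.move_camera.id, [[25, 25]])

# Each Function documents its id and argument types.
print(actions.FUNCTIONS.select_point.str(space=True))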
1.75
2
emoji/coffee.py
wbprice/ojimoji
0
3536
import numpy

h = .25
s = 1

bitmap = numpy.array([
    [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
    [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0],
    [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0],
    [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0],
    [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0],
    [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0],
    [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0],
    [0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0],
    [0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0],
    [0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])
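# Illustrative aside, not part of the original coffee.py: the 16x16 array above is
# plain pixel data (1 = foreground, 0 = background); h and s are presumably consumed
# elsewhere in the project. Assuming matplotlib is available (it is not a dependency
# of the original file), one quick way to preview the bitmap defined above:
import matplotlib.pyplot as plt

plt.imshow(bitmap, cmap="Greys", interpolation="nearest")  # render 1s as dark pixels
plt.axis("off")
plt.show()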
1.078125
1
predict_recognition.py
yeyupiaoling/Kersa-Speaker-Recognition
42
3560
import argparse
import os
import shutil
import time

import numpy as np

from utils import model, utils
from utils.record import RecordAudio

parser = argparse.ArgumentParser()
parser.add_argument('--audio_db', default='audio_db/', type=str, help='path to the audio library')
parser.add_argument('--threshold', default=0.7, type=float, help='threshold for deciding whether two samples are the same person')
parser.add_argument('--model_path', default=r'models/resnet34-56.h5', type=str, help='path to the model')
args = parser.parse_args()

person_feature = []
person_name = []

# Build the model
network_eval = model.vggvox_resnet2d_icassp(input_dim=(257, None, 1), mode='eval')

# Load the pre-trained weights
network_eval.load_weights(os.path.join(args.model_path), by_name=True)
print('==> successfully loading model {}.'.format(args.model_path))


# Predict the voiceprint feature of an audio file
def predict(path):
    specs = utils.load_data(path, mode='eval')
    specs = np.expand_dims(np.expand_dims(specs, 0), -1)
    feature = network_eval.predict(specs)[0]
    return feature


# Load the audio library to recognize against
def load_audio_db(audio_db_path):
    start = time.time()
    audios = os.listdir(audio_db_path)
    for audio in audios:
        path = os.path.join(audio_db_path, audio)
        name = audio[:-4]
        feature = predict(path)
        person_name.append(name)
        person_feature.append(feature)
        print("Loaded %s audio." % name)
    end = time.time()
    print('Finished loading the audio library, time taken: %fms' % (round((end - start) * 1000)))


# Recognize a voiceprint
def recognition(path):
    name = ''
    pro = 0
    feature = predict(path)
    for i, person_f in enumerate(person_feature):
        # compute the similarity
        dist = np.dot(feature, person_f.T)
        if dist > pro:
            pro = dist
            name = person_name[i]
    return name, pro


# Register a voiceprint
def register(path, user_name):
    save_path = os.path.join(args.audio_db, user_name + os.path.basename(path)[-4:])
    shutil.move(path, save_path)
    feature = predict(save_path)
    person_name.append(user_name)
    person_feature.append(feature)


if __name__ == '__main__':
    load_audio_db(args.audio_db)
    record_audio = RecordAudio()
    while True:
        select_fun = int(input("Select a function: 0 to register audio into the voiceprint library, 1 to run voiceprint recognition: "))
        if select_fun == 0:
            audio_path = record_audio.record()
            name = input("Enter the name of the user for this audio: ")
            if name == '':
                continue
            register(audio_path, name)
        elif select_fun == 1:
            audio_path = record_audio.record()
            name, p = recognition(audio_path)
            if p > args.threshold:
                print("Recognized speaker: %s, similarity: %f" % (name, p))
            else:
                print("The audio library does not contain this user's voice")
        else:
            print('Please select a valid function')
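# Illustrative sketch, not part of the original script: the interactive loop above can
# be swapped for a non-interactive driver built from the same functions, for example
# when recognizing a pre-recorded file. "query.wav" is an invented path, and this
# assumes the audio library directory already holds registered recordings.
load_audio_db(args.audio_db)

name, similarity = recognition('query.wav')   # compare one recording against the library
if similarity > args.threshold:
    print("Recognized speaker: %s (similarity %.3f)" % (name, similarity))
else:
    print("No speaker in the library exceeded the threshold %.2f" % args.threshold)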
1.570313
2
tests/unit/zhmcclient/test_hba.py
vkpro-forks/python-zhmcclient
0
3568
# Copyright 2016-2017 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for _hba module. """ from __future__ import absolute_import, print_function import pytest import re import copy from zhmcclient import Client, Hba, HTTPError, NotFound from zhmcclient_mock import FakedSession from tests.common.utils import assert_resources # Object IDs and names of our faked HBAs: HBA1_OID = 'hba 1-oid' HBA1_NAME = 'hba 1' HBA2_OID = 'hba 2-oid' HBA2_NAME = 'hba 2' # URIs and Object IDs of elements referenced in HBA properties: FCP1_OID = 'fake-fcp1-oid' PORT11_OID = 'fake-port11-oid' PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class TestHba(object): """All tests for Hba and HbaManager classes.""" def setup_method(self): """ Set up a faked session, and add a faked CPC in DPM mode with one partition that has no HBAs. Add one FCP adapter and port. """ self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8') self.client = Client(self.session) # Add a CPC in DPM mode self.faked_cpc = self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid', # element-uri is set up automatically 'parent': None, 'class': 'cpc', 'name': 'fake-cpc1-name', 'description': 'CPC #1 (DPM mode)', 'status': 'active', 'dpm-enabled': True, 'is-ensemble-member': False, 'iml-mode': 'dpm', }) self.cpc = self.client.cpcs.find(name='fake-cpc1-name') # Add a partition to the CPC self.faked_partition = self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid', # element-uri will be automatically set 'parent': self.faked_cpc.uri, 'class': 'partition', 'name': 'fake-part1-name', 'description': 'Partition #1', 'status': 'active', 'initial-memory': 1024, 'maximum-memory': 2048, }) self.partition = self.cpc.partitions.find(name='fake-part1-name') # Add an FCP adapter and port to the CPC self.faked_fcp1 = self.faked_cpc.adapters.add({ 'object-id': FCP1_OID, 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fcp1', 'description': 'FCP #1', 'status': 'active', 'type': 'fcp', 'adapter-id': '123', 'detected-card-type': '10gbe-roce-express', 'card-location': '1234-5678-J.01', 'port-count': 1, 'network-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'physical-channel-status': 'operating', }) self.faked_port11 = self.faked_fcp1.ports.add({ 'element-id': PORT11_OID, 'parent': self.faked_fcp1.uri, 'class': 'storage-port', 'index': 1, 'name': 'fake-port11-name', 'description': 'FCP #1 Port #1', }) assert PORT11_URI == self.faked_port11.uri def add_hba1(self): """Add a faked HBA 1 to the faked partition.""" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA1_OID, # element-uri will be automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA1_NAME, 'description': 'HBA ' + HBA1_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011', 'device-number': '1111', }) return faked_hba def add_hba2(self): """Add a faked HBA 2 to the faked 
partition.""" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA2_OID, # element-uri will be automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA2_NAME, 'description': 'HBA ' + HBA2_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012', 'device-number': '1112', }) return faked_hba def test_hbamanager_initial_attrs(self): """Test initial attributes of HbaManager.""" hba_mgr = self.partition.hbas # Verify all public properties of the manager object assert hba_mgr.resource_class == Hba assert hba_mgr.session == self.session assert hba_mgr.parent == self.partition assert hba_mgr.partition == self.partition # TODO: Test for HbaManager.__repr__() @pytest.mark.parametrize( "full_properties_kwargs, prop_names", [ (dict(), ['element-uri']), (dict(full_properties=False), ['element-uri']), (dict(full_properties=True), None), ] ) def test_hbamanager_list_full_properties( self, full_properties_kwargs, prop_names): """Test HbaManager.list() with full_properties.""" # Add two faked HBAs faked_hba1 = self.add_hba1() faked_hba2 = self.add_hba2() exp_faked_hbas = [faked_hba1, faked_hba2] hba_mgr = self.partition.hbas # Execute the code to be tested hbas = hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas, prop_names) @pytest.mark.parametrize( "filter_args, exp_oids", [ ({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID + 'foo'}, []), ({'element-id': [HBA1_OID, HBA2_OID + 'foo']}, [HBA1_OID]), ({'element-id': [HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]), ({'name': HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), ({'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME + 'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME + 'foo']}, [HBA1_OID]), ({'name': [HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]), ({'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'}, [HBA1_OID]), ({'name': 'hba 1.*'}, [HBA1_OID]), ({'name': 'hba .'}, [HBA1_OID, HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]), ({'name': '.+'}, [HBA1_OID, HBA2_OID]), ({'name': 'hba 1.+'}, []), ({'name': '.+hba 1'}, []), ({'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({'name': HBA1_NAME, 'element-id': HBA1_OID + 'foo'}, []), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID}, []), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID + 'foo'}, []), ] ) def test_hbamanager_list_filter_args(self, filter_args, exp_oids): """Test HbaManager.list() with filter_args.""" # Add two faked HBAs self.add_hba1() self.add_hba2() hba_mgr = self.partition.hbas # Execute the code to be tested hbas = hba_mgr.list(filter_args=filter_args) assert len(hbas) == len(exp_oids) if exp_oids: oids = [hba.properties['element-id'] for hba in hbas] assert set(oids) == set(exp_oids) @pytest.mark.parametrize( "initial_partition_status, exp_status_exc", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason': 1})), ('active', None), ('stopping', HTTPError({'http-status': 409, 'reason': 1})), ('degraded', None), ('reservation-error', None), ('paused', None), ] ) @pytest.mark.parametrize( "input_props, exp_prop_names, exp_prop_exc", [ ({}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x'}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'adapter-port-uri': PORT11_URI}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 
'fake-hba-x', 'adapter-port-uri': PORT11_URI}, ['element-uri', 'name', 'adapter-port-uri'], None), ] ) def test_hbamanager_create( self, input_props, exp_prop_names, exp_prop_exc, initial_partition_status, exp_status_exc): """Test HbaManager.create().""" # Set the status of the faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas if exp_status_exc: exp_exc = exp_status_exc elif exp_prop_exc: exp_exc = exp_prop_exc else: exp_exc = None if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute the code to be tested hba = hba_mgr.create(properties=input_props) exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason else: # Execute the code to be tested. # Note: the Hba object returned by Hba.create() has # the input properties plus 'element-uri' plus 'element-id'. hba = hba_mgr.create(properties=input_props) # Check the resource for consistency within itself assert isinstance(hba, Hba) hba_name = hba.name exp_hba_name = hba.properties['name'] assert hba_name == exp_hba_name hba_uri = hba.uri exp_hba_uri = hba.properties['element-uri'] assert hba_uri == exp_hba_uri # Check the properties against the expected names and values for prop_name in exp_prop_names: assert prop_name in hba.properties if prop_name in input_props: value = hba.properties[prop_name] exp_value = input_props[prop_name] assert value == exp_value def test_hba_repr(self): """Test Hba.__repr__().""" # Add a faked hba faked_hba = self.add_hba1() hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) # Execute the code to be tested repr_str = repr(hba) repr_str = repr_str.replace('\n', '\\n') # We check just the begin of the string: assert re.match(r'^{classname}\s+at\s+0x{id:08x}\s+\(\\n.*'. format(classname=hba.__class__.__name__, id=id(hba)), repr_str) @pytest.mark.parametrize( "initial_partition_status, exp_exc", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason': 1})), ('active', None), ('stopping', HTTPError({'http-status': 409, 'reason': 1})), ('degraded', None), ('reservation-error', None), ('paused', None), ] ) def test_hba_delete(self, initial_partition_status, exp_exc): """Test Hba.delete().""" # Add a faked HBA to be tested and another one faked_hba = self.add_hba1() self.add_hba2() # Set the status of the faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute the code to be tested hba.delete() exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason # Check that the HBA still exists hba_mgr.find(name=faked_hba.name) else: # Execute the code to be tested. 
hba.delete() # Check that the HBA no longer exists with pytest.raises(NotFound) as exc_info: hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self): """Test Hba.delete() followed by Hba.create() with same name.""" # Add a faked HBA to be tested and another one faked_hba = self.add_hba1() hba_name = faked_hba.name self.add_hba2() # Construct the input properties for a third HBA with same name part3_props = copy.deepcopy(faked_hba.properties) part3_props['description'] = 'Third HBA' # Set the status of the faked partition self.faked_partition.properties['status'] = 'stopped' # deletable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) # Execute the deletion code to be tested. hba.delete() # Check that the HBA no longer exists with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Execute the creation code to be tested. hba_mgr.create(part3_props) # Check that the HBA exists again under that name hba3 = hba_mgr.find(name=hba_name) description = hba3.get_property('description') assert description == 'Third HBA' @pytest.mark.parametrize( "input_props", [ {}, {'description': 'New HBA description'}, {'device-number': 'FEDC', 'description': 'New HBA description'}, ] ) def test_hba_update_properties(self, input_props): """Test Hba.update_properties().""" # Add a faked HBA faked_hba = self.add_hba1() # Set the status of the faked partition self.faked_partition.properties['status'] = 'stopped' # updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) # Execute the code to be tested hba.update_properties(properties=input_props) # Verify that the resource object already reflects the property # updates. for prop_name in saved_properties: if prop_name in input_props: exp_prop_value = input_props[prop_name] else: exp_prop_value = saved_properties[prop_name] assert prop_name in hba.properties prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value # Refresh the resource object and verify that the resource object # still reflects the property updates. hba.pull_full_properties() for prop_name in saved_properties: if prop_name in input_props: exp_prop_value = input_props[prop_name] else: exp_prop_value = saved_properties[prop_name] assert prop_name in hba.properties prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value def test_hba_update_name(self): """Test Hba.update_properties() with 'name' property.""" # Add a faked HBA faked_hba = self.add_hba1() hba_name = faked_hba.name # Set the status of the faked partition self.faked_partition.properties['status'] = 'stopped' # updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) new_hba_name = "new-" + hba_name # Execute the code to be tested hba.update_properties(properties={'name': new_hba_name}) # Verify that the resource is no longer found by its old name, using # list() (this does not use the name-to-URI cache). hbas_list = hba_mgr.list( filter_args=dict(name=hba_name)) assert len(hbas_list) == 0 # Verify that the resource is no longer found by its old name, using # find() (this uses the name-to-URI cache). with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Verify that the resource object already reflects the update, even # though it has not been refreshed yet. assert hba.properties['name'] == new_hba_name # Refresh the resource object and verify that it still reflects the # update. 
hba.pull_full_properties() assert hba.properties['name'] == new_hba_name # Verify that the resource can be found by its new name, using find() new_hba_find = hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name'] == new_hba_name # Verify that the resource can be found by its new name, using list() new_hbas_list = hba_mgr.list( filter_args=dict(name=new_hba_name)) assert len(new_hbas_list) == 1 new_hba_list = new_hbas_list[0] assert new_hba_list.properties['name'] == new_hba_name @pytest.mark.parametrize( "initial_partition_status, exp_exc", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason': 1})), ('active', None), ('stopping', HTTPError({'http-status': 409, 'reason': 1})), ('degraded', None), ('reservation-error', None), ('paused', None), ] ) def test_hba_reassign_port(self, initial_partition_status, exp_exc): """Test Hba.reassign_port().""" # Add a faked HBA to be tested. # Its port points to a faked URI. faked_hba = self.add_hba1() # Add a faked FCP with one port that the HBA will be reassigned to faked_adapter = self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid', # object-uri is auto-set based upon object-id 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fake-fcp1', 'description': 'FCP #1', 'status': 'active', 'type': 'fcp', # adapter-family is auto-set based upon type 'adapter-id': '123', 'detected-card-type': 'ficon-express-16s', 'card-location': '1234-5678-J.01', 'port-count': 1, 'storage-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'channel-path-id': '1B', 'physical-channel-status': 'operating', }) adapter = self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id': 'fake-port1-oid', # element-uri is auto-set based upon object-id 'parent': faked_adapter.uri, 'class': 'storage-port', 'name': 'fake-port1', 'description': 'FCP #1 Port 1', 'index': 0, 'fabric-id': None, }) port = adapter.ports.find(name='fake-port1') # Set the status of the faked partition self.faked_partition.properties['status'] = initial_partition_status # The HBA object we will perform the test on hba = self.partition.hbas.find(name=faked_hba.name) # Save the HBA properties for later comparison hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute the code to be tested hba.reassign_port(port) exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason # Check that the port of the HBA is unchanged ... prop_name = 'adapter-port-uri' # ... in the resource object: assert hba.properties[prop_name] == saved_properties[prop_name] # ... and again when refreshed from the mock state: hba.pull_full_properties() assert hba.properties[prop_name] == saved_properties[prop_name] else: # Execute the code to be tested. hba.reassign_port(port) # Check that the port of the HBA has been set ... # ... in the resource object: prop_name = 'adapter-port-uri' assert hba.properties[prop_name] == port.uri # ... and again when refreshed from the mock state: hba.pull_full_properties() assert hba.properties[prop_name] == port.uri
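# --- Illustration (not part of the original test module): these tests run
# entirely against zhmcclient_mock's FakedSession, so no real HMC is needed.
# Assuming pytest is installed, a typical invocation would be:
#
#   pytest tests/unit/zhmcclient/test_hba.py -v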
1.367188
1