id | code | code_compressed
---|---|---
200 | import pytest, py
import re
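# Helper: return the exception currently being handled (compatible with Python 2 and 3).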
def exvalue():
import sys
return sys.exc_info()[1]
def f():
return 2
def test_assert():
try:
assert f() == 3
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 == 3\n')
def test_assert_within_finally():
excinfo = py.test.raises(ZeroDivisionError, """
try:
1/0
finally:
i = 42
""")
s = excinfo.exconly()
assert re.search("ZeroDivisionError:.*division", s) is not None
def test_assert_multiline_1():
try:
assert (f() ==
3)
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 == 3\n')
def test_assert_multiline_2():
try:
assert (f() == (4,
3)[-1])
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 ==')
def test_in():
try:
assert "hi" in [1, 2]
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 'hi' in")
def test_is():
try:
assert 1 is 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 is 2")
def test_attrib():
class Foo(object):
b = 1
i = Foo()
try:
assert i.b == 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 == 2")
def test_attrib_inst():
class Foo(object):
b = 1
try:
assert Foo().b == 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 == 2")
def test_len():
l = list(range(42))
try:
assert len(l) == 100
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 42 == 100")
assert "where 42 = len([" in s
def test_assert_keyword_arg():
def f(x=3):
return False
try:
assert f(x=5)
except AssertionError:
e = exvalue()
assert "x=5" in str(e)
# This test should fail, but should fail nicely...
class WeirdRepr:
def __repr__(self):
return '<WeirdRepr\nsecond line>'
def bug_test_assert_repr():
v = WeirdRepr()
try:
assert v == 1
except AssertionError:
e = exvalue()
assert str(e).find('WeirdRepr') != -1
assert str(e).find('second line') != -1
assert 0
def test_assert_non_string():
try:
assert 0, ['list']
except AssertionError:
e = exvalue()
assert str(e).find("list") != -1
def test_assert_implicit_multiline():
try:
x = [1,2,3]
assert x != [1,
2, 3]
except AssertionError:
e = exvalue()
assert str(e).find('assert [1, 2, 3] !=') != -1
@py.test.mark.xfail(py.test.__version__[0] != "2",
reason="broken on modern pytest",
run=False
)
def test_assert_with_brokenrepr_arg():
class BrokenRepr:
def __repr__(self): 0 / 0
e = AssertionError(BrokenRepr())
if e.msg.find("broken __repr__") == -1:
py.test.fail("broken __repr__ not handled correctly")
def test_multiple_statements_per_line():
try:
a = 1; assert a == 2
except AssertionError:
e = exvalue()
assert "assert 1 == 2" in str(e)
def test_power():
try:
assert 2**3 == 7
except AssertionError:
e = exvalue()
assert "assert (2 ** 3) == 7" in str(e)
class TestView:
def setup_class(cls):
cls.View = py.test.importorskip("py._code._assertionold").View
def METHOD_NAME(self):
### Use a custom class hierarchy with existing instances
class Picklable(self.View):
pass
class Simple(Picklable):
__view__ = object
def pickle(self):
return repr(self.__obj__)
class Seq(Picklable):
__view__ = list, tuple, dict
def pickle(self):
return ';'.join(
[Picklable(item).pickle() for item in self.__obj__])
class Dict(Seq):
__view__ = dict
def pickle(self):
return Seq.pickle(self) + '!' + Seq(self.values()).pickle()
assert Picklable(123).pickle() == '123'
assert Picklable([1,[2,3],4]).pickle() == '1;2;3;4'
assert Picklable({1:2}).pickle() == '1!2'
def test_viewtype_class_hierarchy(self):
# Use a custom class hierarchy based on attributes of existing instances
class Operation:
"Existing class that I don't want to change."
def __init__(self, opname, *args):
self.opname = opname
self.args = args
existing = [Operation('+', 4, 5),
Operation('getitem', '', 'join'),
Operation('setattr', 'x', 'y', 3),
Operation('-', 12, 1)]
class PyOp(self.View):
def __viewkey__(self):
return self.opname
def generate(self):
return '%s(%s)' % (self.opname, ', '.join(map(repr, self.args)))
class PyBinaryOp(PyOp):
__view__ = ('+', '-', '*', '/')
def generate(self):
return '%s %s %s' % (self.args[0], self.opname, self.args[1])
codelines = [PyOp(op).generate() for op in existing]
assert codelines == ["4 + 5", "getitem('', 'join')",
"setattr('x', 'y', 3)", "12 - 1"]
def test_underscore_api():
py.code._AssertionError
py.code._reinterpret_old # used by pypy
py.code._reinterpret
def test_assert_customizable_reprcompare(monkeypatch):
util = pytest.importorskip("_pytest.assertion.util")
monkeypatch.setattr(util, '_reprcompare', lambda *args: 'hello')
try:
assert 3 == 4
except AssertionError:
e = exvalue()
s = str(e)
assert "hello" in s
def test_assert_long_source_1():
try:
assert len == [
(None, ['somet text', 'more text']),
]
except AssertionError:
e = exvalue()
s = str(e)
assert 're-run' not in s
assert 'somet text' in s
def test_assert_long_source_2():
try:
assert(len == [
(None, ['somet text', 'more text']),
])
except AssertionError:
e = exvalue()
s = str(e)
assert 're-run' not in s
assert 'somet text' in s
def test_assert_raise_alias(testdir):
testdir.makepyfile("""
import sys
EX = AssertionError
def test_hello():
raise EX("hello"
"multi"
"line")
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*def test_hello*",
"*raise EX*",
"*1 failed*",
])
@py.test.mark.xfail(py.test.__version__[0] != "2",
reason="broken on modern pytest",
run=False)
def test_assert_raise_subclass():
class SomeEx(AssertionError):
def __init__(self, *args):
super(SomeEx, self).__init__()
try:
raise SomeEx("hello")
except AssertionError as e:
s = str(e)
assert 're-run' not in s
assert 'could not determine' in s
def test_assert_raises_in_nonzero_of_object_pytest_issue10():
class A(object):
def __nonzero__(self):
raise ValueError(42)
def __lt__(self, other):
return A()
def __repr__(self):
return "<MY42 object>"
def myany(x):
return True
try:
assert not(myany(A() < 0))
except AssertionError:
e = exvalue()
s = str(e)
assert "<MY42 object> < 0" in s | null |
201 | from __future__ import annotations
from collections import defaultdict
from typing import Dict, Iterable, Set, Tuple, TypeVar, Union
from pharmpy.deps import sympy
from pharmpy.internals.expr.subs import subs
from pharmpy.internals.expr.tree import prune
from pharmpy.internals.graph.directed.reachability import reachable_from
from pharmpy.model import Assignment, Model, ODESystem
T = TypeVar('T')
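# Strip a leading -1 factor from a product (e.g. -x*y -> x*y); other expressions pass through unchanged.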
def METHOD_NAME(expr):
if expr.is_Mul and expr.args[0] == -1:
return sympy.Mul(*expr.args[1:])
else:
return expr
def get_unit_of(model: Model, variable: Union[str, sympy.Symbol]):
"""Derive the physical unit of a variable in the model
Unit information for the dataset needs to be available.
The variable can be defined in the code, a dataset column, a parameter
or a random variable.
Parameters
----------
model : Model
Pharmpy model object
variable : str or Symbol
Find physical unit of this variable
Returns
-------
unit expression
A sympy physics.units expression
Examples
--------
>>> from pharmpy.modeling import load_example_model, get_unit_of
>>> model = load_example_model("pheno")
>>> get_unit_of(model, "Y")
milligram/liter
>>> get_unit_of(model, "V")
liter
>>> get_unit_of(model, "WGT")
kilogram
"""
if isinstance(variable, str):
symbol = sympy.Symbol(variable)
else:
symbol = variable
variable = variable.name
di = model.datainfo
if variable in di.names:
return di[variable].unit
# FIXME: handle other DVs?
y = list(model.dependent_variables.keys())[0]
input_units = {sympy.Symbol(col.name): col.unit for col in di}
pruned_nodes = {sympy.exp}
def pruning_predicate(e: sympy.Expr) -> bool:
return e.func in pruned_nodes
unit_eqs = []
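# The dependent variable carries the same unit as the dataset's DV column.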
unit_eqs.append(y - di[di.dv_column.name].unit)
for s in model.statements:
if isinstance(s, Assignment):
expr = sympy.expand(
subs(prune(pruning_predicate, s.expression), input_units, simultaneous=True)
)
if expr.is_Add:
for term in expr.args:
unit_eqs.append(s.symbol - METHOD_NAME(term))
else:
unit_eqs.append(s.symbol - METHOD_NAME(expr))
elif isinstance(s, ODESystem):
amt_unit = di[di.typeix['dose'][0].name].unit
time_unit = di[di.idv_column.name].unit
for e in s.compartmental_matrix.diagonal():
if e.is_Add:
for term in e.args:
unit_eqs.append(amt_unit / time_unit - METHOD_NAME(term))
elif e == 0:
pass
else:
unit_eqs.append(amt_unit / time_unit - METHOD_NAME(e))
for a in s.amounts:
unit_eqs.append(amt_unit - a)
# NOTE This keeps only the equations required to solve for "symbol"
filtered_unit_eqs = _filter_equations(unit_eqs, symbol)
# NOTE For some reason telling sympy to solve for "symbol" does not work
sol = sympy.solve(filtered_unit_eqs, dict=True)
return sol[0][symbol]
def _filter_equations(
equations: Iterable[sympy.Expr], symbol: sympy.Symbol
) -> Iterable[sympy.Expr]:
# NOTE This has the side-effect of deduplicating equations
fs = {eq: eq.free_symbols for eq in equations}
# NOTE We could first contract clique edges but I have not found a way to
# make it as elegant as the current implementation
edges = _cliques_spanning_forest_edges_linear_superset(fs.values())
graph = _adjacency_list(edges)
dependent_symbols = reachable_from(
{symbol},
graph.__getitem__,
)
# NOTE All symbols are in the same connected component so we only need to
# test one symbol for each equation
return (
eq for eq, symbols in fs.items() if symbols and next(iter(symbols)) in dependent_symbols
)
def _adjacency_list(edges: Iterable[Tuple[T, T]]) -> Dict[T, Set[T]]:
graph = defaultdict(set)
for u, v in edges:
graph[u].add(v)
graph[v].add(u)
return graph
def _cliques_spanning_forest_edges_linear_superset(
cliques: Iterable[Iterable[T]],
) -> Iterable[Tuple[T, T]]:
# NOTE This is not a forest but it has a linear number of edges in the
# input size. Building a spanning tree would require a union-find data
# structure and superlinear time, which is unnecessary here since we are
# only interested in connected components of the graph.
for clique in cliques:
yield from _clique_spanning_tree_edges(clique)
def _clique_spanning_tree_edges(clique: Iterable[T]) -> Iterable[Tuple[T, T]]:
it = iter(clique)
try:
u = next(it)
except StopIteration:
return
for v in it:
yield (u, v) | null |
202 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# «recovery_xml» - Helper Class for parsing and using a bto.xml
#
# Copyright (C) 2010-2011, Dell Inc.
#
# Author:
# - Mario Limonciello <[email protected]>
#
# This is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this application; if not, write to the Free Software Foundation, Inc., 51
# Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##################################################################################
import xml.dom.minidom
import codecs
import os
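# Coerce bytes-like input to a UTF-8 str; plain strings pass through unchanged.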
def METHOD_NAME(old):
if isinstance(old, str):
return old
else:
return str(bytes(old), 'utf-8', errors='ignore')
class BTOxml:
def __init__(self):
self.dom = None
self.new = False
self.load_bto_xml()
def set_base(self, name, md5=''):
"""Sets the base image"""
self.replace_node_contents('base', name)
if md5:
self.dom.getElementsByTagName('base')[0].setAttribute('md5', md5)
def append_fish(self, fish_type, name, md5='', srv=''):
"""Appends a fish package"""
elements = self.dom.getElementsByTagName('fish')
new_element = self.dom.createElement(fish_type)
if md5:
new_element.setAttribute('md5', md5)
if srv:
new_element.setAttribute('srv', srv)
new_node = self.dom.createTextNode(name)
new_element.appendChild(new_node)
elements[0].appendChild(new_element)
def fetch_node_contents(self, tag):
"""Fetches all children of a tag"""
elements = self.dom.getElementsByTagName(tag)
values = ''
if len(elements) > 1:
values = []
if elements:
for element in elements:
child = element.firstChild
if child:
if len(elements) > 1:
values.append(child.nodeValue.strip())
else:
values = child.nodeValue.strip()
return values
def replace_node_contents(self, tag, new):
"""Replaces a node contents (that we assume exists)"""
elements = self.dom.getElementsByTagName(tag)
if not elements:
print("Missing elements for tag")
return
if elements[0].hasChildNodes():
for node in elements[0].childNodes:
elements[0].removeChild(node)
noob = self.dom.createTextNode(METHOD_NAME(new))
elements[0].appendChild(noob)
def load_bto_xml(self, fname=None):
"""Initialize an XML file into memory"""
def create_top_level(dom):
"""Initializes a top level document"""
element = dom.createElement('bto')
dom.appendChild(element)
return element
def create_tag(dom, tag, append_to):
"""Create a subtag as necessary"""
element = dom.getElementsByTagName(tag)
if element:
element = element[0]
else:
element = dom.createElement(tag)
append_to.appendChild(element)
return element
if fname:
self.new = False
try:
if os.path.exists(fname):
with open(fname, 'rb') as f:
fname = f.read()
self.dom = xml.dom.minidom.parseString(METHOD_NAME(fname))
except xml.parsers.expat.ExpatError:
print("Damaged XML file, regenerating")
if not (fname and self.dom):
self.new = True
self.dom = xml.dom.minidom.Document()
#test for top level bto object
if self.dom.firstChild and self.dom.firstChild.localName != 'bto':
self.dom.removeChild(self.dom.firstChild)
if not self.dom.firstChild:
bto = create_top_level(self.dom)
else:
bto = self.dom.getElementsByTagName('bto')[0]
#create all our second and third level tags that are supported
for tag in ['date', 'versions', 'base', 'fid', 'fish', 'logs']:
element = create_tag(self.dom, tag, bto)
subtags = []
if tag == 'versions':
subtags = ['os', 'iso', 'generator', 'bootstrap', 'ubiquity', 'revision', 'platform']
elif tag == 'fid':
subtags = ['git_tag', 'deb_archive']
elif tag == 'logs':
subtags = ['syslog', 'debug']
for subtag in subtags:
create_tag(self.dom, subtag, element)
def write_xml(self, fname):
"""Writes out a BTO XML file based on the current data"""
with codecs.open(fname, 'w', 'utf-8') as wfd:
if self.new:
self.dom.writexml(wfd, "", " ", "\n", encoding='utf-8')
else:
self.dom.writexml(wfd, encoding='utf-8') | null |
203 | # coding=utf-8
# Copyright 2018-2023 EvaDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from evadb.catalog.catalog_type import ColumnType, Dimension, NdArrayType
from evadb.functions.decorators.io_descriptors.data_types import (
NumpyArray,
PandasDataframe,
PyTorchTensor,
)
from evadb.utils.errors import FunctionIODefinitionError
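# Each test checks that an IO descriptor declaration expands into the expected catalog column entries.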
class FunctionIODescriptorsTests(unittest.TestCase):
def test_catalog_entry_for_numpy_entry(self):
numpy_array = NumpyArray(
name="input", is_nullable=False, type=NdArrayType.UINT8, dimensions=(2, 2)
)
catalog_entries = numpy_array.generate_catalog_entries()
# check that there is only a single catalog entry
self.assertEqual(len(catalog_entries), 1)
# check that the attributes of the catalog entry are correct
catalog_entry = catalog_entries[0]
self.assertEqual(catalog_entry.name, "input")
self.assertEqual(catalog_entry.type, ColumnType.NDARRAY)
self.assertEqual(catalog_entry.is_nullable, False)
self.assertEqual(catalog_entry.array_type, NdArrayType.UINT8)
self.assertEqual(catalog_entry.array_dimensions, (2, 2))
self.assertEqual(catalog_entry.is_input, False)
def test_catalog_entry_for_pytorch_entry(self):
pytorch_tensor = PyTorchTensor(
name="input", is_nullable=False, type=NdArrayType.UINT8, dimensions=(2, 2)
)
catalog_entries = pytorch_tensor.generate_catalog_entries()
# check that there is only a single catalog entry
self.assertEqual(len(catalog_entries), 1)
# check that the attributes of the catalog entry are correct
catalog_entry = catalog_entries[0]
self.assertEqual(catalog_entry.name, "input")
self.assertEqual(catalog_entry.type, ColumnType.NDARRAY)
self.assertEqual(catalog_entry.is_nullable, False)
self.assertEqual(catalog_entry.array_type, NdArrayType.UINT8)
self.assertEqual(catalog_entry.array_dimensions, (2, 2))
self.assertEqual(catalog_entry.is_input, False)
def test_catalog_entry_for_pandas_entry_with_single_column_simple(self):
# dataframe has only columns defined
pandas_dataframe = PandasDataframe(columns=["Frame_Array"])
catalog_entries = pandas_dataframe.generate_catalog_entries()
# check that there is only a single catalog entry
self.assertEqual(len(catalog_entries), 1)
# check that the attributes of the catalog entry are correct
catalog_entry = catalog_entries[0]
self.assertEqual(catalog_entry.name, "Frame_Array")
self.assertEqual(catalog_entry.type, ColumnType.NDARRAY)
self.assertEqual(catalog_entry.is_nullable, False)
self.assertEqual(catalog_entry.array_type, NdArrayType.ANYTYPE)
self.assertEqual(catalog_entry.array_dimensions, Dimension.ANYDIM)
def test_catalog_entry_for_pandas_entry_with_single_column(self):
pandas_dataframe = PandasDataframe(
columns=["Frame_Array"],
column_types=[NdArrayType.UINT8],
column_shapes=[(3, 256, 256)],
)
catalog_entries = pandas_dataframe.generate_catalog_entries()
# check that there is only a single catalog entry
self.assertEqual(len(catalog_entries), 1)
# check that the attributes of the catalog entry are correct
catalog_entry = catalog_entries[0]
self.assertEqual(catalog_entry.name, "Frame_Array")
self.assertEqual(catalog_entry.type, ColumnType.NDARRAY)
self.assertEqual(catalog_entry.is_nullable, False)
self.assertEqual(catalog_entry.array_type, NdArrayType.UINT8)
self.assertEqual(catalog_entry.array_dimensions, (3, 256, 256))
self.assertEqual(catalog_entry.is_input, False)
def test_catalog_entry_for_pandas_entry_with_multiple_columns_simple(self):
# dataframe has only columns defined
pandas_dataframe = PandasDataframe(columns=["Frame_Array", "Frame_Array_2"])
catalog_entries = pandas_dataframe.generate_catalog_entries()
# check that there are two catalog entries
self.assertEqual(len(catalog_entries), 2)
# check that the attributes of the catalog entry are correct
catalog_entry = catalog_entries[0]
self.assertEqual(catalog_entry.name, "Frame_Array")
self.assertEqual(catalog_entry.type, ColumnType.NDARRAY)
self.assertEqual(catalog_entry.is_nullable, False)
self.assertEqual(catalog_entry.array_type, NdArrayType.ANYTYPE)
self.assertEqual(catalog_entry.array_dimensions, Dimension.ANYDIM)
catalog_entry = catalog_entries[1]
self.assertEqual(catalog_entry.name, "Frame_Array_2")
self.assertEqual(catalog_entry.type, ColumnType.NDARRAY)
self.assertEqual(catalog_entry.is_nullable, False)
self.assertEqual(catalog_entry.array_type, NdArrayType.ANYTYPE)
self.assertEqual(catalog_entry.array_dimensions, Dimension.ANYDIM)
def METHOD_NAME(self):
pandas_dataframe = PandasDataframe(
columns=["Frame_Array", "Frame_Array_2"],
column_types=[NdArrayType.UINT8, NdArrayType.FLOAT32],
column_shapes=[(3, 256, 256), (3, 256, 256)],
)
catalog_entries = pandas_dataframe.generate_catalog_entries()
# check that there are two catalog entries
self.assertEqual(len(catalog_entries), 2)
# check that the attributes of the catalog entry are correct
catalog_entry = catalog_entries[0]
self.assertEqual(catalog_entry.name, "Frame_Array")
self.assertEqual(catalog_entry.type, ColumnType.NDARRAY)
self.assertEqual(catalog_entry.is_nullable, False)
self.assertEqual(catalog_entry.array_type, NdArrayType.UINT8)
self.assertEqual(catalog_entry.array_dimensions, (3, 256, 256))
self.assertEqual(catalog_entry.is_input, False)
catalog_entry = catalog_entries[1]
self.assertEqual(catalog_entry.name, "Frame_Array_2")
self.assertEqual(catalog_entry.type, ColumnType.NDARRAY)
self.assertEqual(catalog_entry.is_nullable, False)
self.assertEqual(catalog_entry.array_type, NdArrayType.FLOAT32)
self.assertEqual(catalog_entry.array_dimensions, (3, 256, 256))
self.assertEqual(catalog_entry.is_input, False)
def test_raises_error_on_incorrect_pandas_definition(self):
# the dataframe should have multiple columns but column_types should be defined for only one
pandas_dataframe = PandasDataframe(
columns=["Frame_Array", "Frame_Array_2"],
column_types=[NdArrayType.UINT8],
column_shapes=[(3, 256, 256), (3, 256, 256)],
)
with self.assertRaises(FunctionIODefinitionError):
pandas_dataframe.generate_catalog_entries() | null |
204 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksmc.endpoint import endpoint_data
class ModifyReplicationJobAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'smc', '2019-06-01', 'ModifyReplicationJobAttribute','smc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_TargetType(self):
return self.get_query_params().get('TargetType')
def set_TargetType(self,TargetType):
self.add_query_param('TargetType',TargetType)
def get_Description(self):
return self.get_query_params().get('Description')
def set_Description(self,Description):
self.add_query_param('Description',Description)
def get_Frequency(self):
return self.get_query_params().get('Frequency')
def set_Frequency(self,Frequency):
self.add_query_param('Frequency',Frequency)
def get_JobId(self):
return self.get_query_params().get('JobId')
def set_JobId(self,JobId):
self.add_query_param('JobId',JobId)
def get_ImageName(self):
return self.get_query_params().get('ImageName')
def set_ImageName(self,ImageName):
self.add_query_param('ImageName',ImageName)
def get_SystemDiskSize(self):
return self.get_query_params().get('SystemDiskSize')
def set_SystemDiskSize(self,SystemDiskSize):
self.add_query_param('SystemDiskSize',SystemDiskSize)
def get_InstanceType(self):
return self.get_query_params().get('InstanceType')
def set_InstanceType(self,InstanceType):
self.add_query_param('InstanceType',InstanceType)
def get_ContainerRepository(self):
return self.get_query_params().get('ContainerRepository')
def METHOD_NAME(self,ContainerRepository):
self.add_query_param('ContainerRepository',ContainerRepository)
def get_ContainerTag(self):
return self.get_query_params().get('ContainerTag')
def set_ContainerTag(self,ContainerTag):
self.add_query_param('ContainerTag',ContainerTag)
def get_ContainerNamespace(self):
return self.get_query_params().get('ContainerNamespace')
def set_ContainerNamespace(self,ContainerNamespace):
self.add_query_param('ContainerNamespace',ContainerNamespace)
def get_LaunchTemplateId(self):
return self.get_query_params().get('LaunchTemplateId')
def set_LaunchTemplateId(self,LaunchTemplateId):
self.add_query_param('LaunchTemplateId',LaunchTemplateId)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_SystemDiskParts(self):
return self.get_query_params().get('SystemDiskPart')
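# Serialize each partition dict into indexed query parameters (SystemDiskPart.N.SizeBytes/.Block/.Device).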
def set_SystemDiskParts(self, SystemDiskParts):
for depth1 in range(len(SystemDiskParts)):
if SystemDiskParts[depth1].get('SizeBytes') is not None:
self.add_query_param('SystemDiskPart.' + str(depth1 + 1) + '.SizeBytes', SystemDiskParts[depth1].get('SizeBytes'))
if SystemDiskParts[depth1].get('Block') is not None:
self.add_query_param('SystemDiskPart.' + str(depth1 + 1) + '.Block', SystemDiskParts[depth1].get('Block'))
if SystemDiskParts[depth1].get('Device') is not None:
self.add_query_param('SystemDiskPart.' + str(depth1 + 1) + '.Device', SystemDiskParts[depth1].get('Device'))
def get_ValidTime(self):
return self.get_query_params().get('ValidTime')
def set_ValidTime(self,ValidTime):
self.add_query_param('ValidTime',ValidTime)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_DataDisks(self):
return self.get_query_params().get('DataDisk')
def set_DataDisks(self, DataDisks):
for depth1 in range(len(DataDisks)):
if DataDisks[depth1].get('Size') is not None:
self.add_query_param('DataDisk.' + str(depth1 + 1) + '.Size', DataDisks[depth1].get('Size'))
if DataDisks[depth1].get('Part') is not None:
for depth2 in range(len(DataDisks[depth1].get('Part'))):
if DataDisks[depth1].get('Part')[depth2].get('SizeBytes') is not None:
self.add_query_param('DataDisk.' + str(depth1 + 1) + '.Part.' + str(depth2 + 1) + '.SizeBytes', DataDisks[depth1].get('Part')[depth2].get('SizeBytes'))
if DataDisks[depth1].get('Part')[depth2].get('Block') is not None:
self.add_query_param('DataDisk.' + str(depth1 + 1) + '.Part.' + str(depth2 + 1) + '.Block', DataDisks[depth1].get('Part')[depth2].get('Block'))
if DataDisks[depth1].get('Part')[depth2].get('Device') is not None:
self.add_query_param('DataDisk.' + str(depth1 + 1) + '.Part.' + str(depth2 + 1) + '.Device', DataDisks[depth1].get('Part')[depth2].get('Device'))
if DataDisks[depth1].get('Index') is not None:
self.add_query_param('DataDisk.' + str(depth1 + 1) + '.Index', DataDisks[depth1].get('Index'))
def get_LaunchTemplateVersion(self):
return self.get_query_params().get('LaunchTemplateVersion')
def set_LaunchTemplateVersion(self,LaunchTemplateVersion):
self.add_query_param('LaunchTemplateVersion',LaunchTemplateVersion)
def get_ScheduledStartTime(self):
return self.get_query_params().get('ScheduledStartTime')
def set_ScheduledStartTime(self,ScheduledStartTime):
self.add_query_param('ScheduledStartTime',ScheduledStartTime)
def get_InstanceId(self):
return self.get_query_params().get('InstanceId')
def set_InstanceId(self,InstanceId):
self.add_query_param('InstanceId',InstanceId)
def get_InstanceRamRole(self):
return self.get_query_params().get('InstanceRamRole')
def set_InstanceRamRole(self,InstanceRamRole):
self.add_query_param('InstanceRamRole',InstanceRamRole)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
def get_MaxNumberOfImageToKeep(self):
return self.get_query_params().get('MaxNumberOfImageToKeep')
def set_MaxNumberOfImageToKeep(self,MaxNumberOfImageToKeep):
self.add_query_param('MaxNumberOfImageToKeep',MaxNumberOfImageToKeep) | null |
205 | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2021-2023 Valory AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""HTTP server to control the tendermint execution environment."""
import json
import logging
import os
import shutil
import stat
import time
from pathlib import Path
from typing import Any, Callable, Dict, Optional, Tuple
from flask import Flask, Response, jsonify, request
from werkzeug.exceptions import InternalServerError, NotFound
from packages.valory.agents.register_reset.tests.helpers.slow_tendermint_server.tendermint import (
TendermintNode,
TendermintParams,
)
ENCODING = "utf-8"
DEFAULT_LOG_FILE = "log.log"
IS_DEV_MODE = os.environ.get("DEV_MODE", "0") == "1"
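# (old, new) pairs rewriting config.toml defaults: disable fast sync and peer exchange to isolate the node.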
CONFIG_OVERRIDE = [
("fast_sync = true", "fast_sync = false"),
("max_num_outbound_peers = 10", "max_num_outbound_peers = 0"),
("pex = true", "pex = false"),
]
logging.basicConfig(
filename=os.environ.get("LOG_FILE", DEFAULT_LOG_FILE),
level=logging.DEBUG,
format="%(asctime)s %(levelname)s %(name)s %(threadName)s : %(message)s", # noqa : W1309
)
def load_genesis() -> Any:
"""Load genesis file."""
return json.loads(
Path(os.environ["TMHOME"], "config", "genesis.json").read_text(
encoding=ENCODING
)
)
def get_defaults() -> Dict[str, str]:
"""Get defaults from genesis file."""
genesis = load_genesis()
return dict(genesis_time=genesis.get("genesis_time"))
def override_config_toml() -> None:
"""Update sync method."""
config_path = str(Path(os.environ["TMHOME"]) / "config" / "config.toml")
with open(config_path, "r", encoding=ENCODING) as fp:
config = fp.read()
for old, new in CONFIG_OVERRIDE:
config = config.replace(old, new)
with open(config_path, "w+", encoding=ENCODING) as fp:
fp.write(config)
class PeriodDumper:
"""Dumper for tendermint data."""
resets: int
dump_dir: Path
logger: logging.Logger
def __init__(self, logger: logging.Logger, dump_dir: Optional[Path] = None) -> None:
"""Initialize object."""
self.resets = 0
self.logger = logger
self.dump_dir = dump_dir or Path("/tm_state")
if self.dump_dir.is_dir():
shutil.rmtree(str(self.dump_dir), onerror=self.readonly_handler)
self.dump_dir.mkdir(exist_ok=True)
@staticmethod
def readonly_handler(
func: Callable, path: str, execinfo: Any # pylint: disable=unused-argument
) -> None:
"""If permission is readonly, we change and retry."""
try:
os.chmod(path, stat.S_IWRITE)
func(path)
except (FileNotFoundError, OSError):
return
def dump_period(self) -> None:
"""Dump tendermint run data for replay"""
store_dir = self.dump_dir / f"period_{self.resets}"
store_dir.mkdir(exist_ok=True)
try:
shutil.copytree(
os.environ["TMHOME"], str(store_dir / ("node" + os.environ["ID"]))
)
self.logger.info(f"Dumped data for period {self.resets}")
except OSError:
self.logger.info(
f"Error occurred while dumping data for period {self.resets}"
)
self.resets += 1
def create_app(
dump_dir: Optional[Path] = None,
perform_monitoring: bool = True,
debug: bool = False,
) -> Tuple[Flask, TendermintNode]:
"""
Create a tendermint server app that is slow to respond to /hard_reset response.
This implementation was copied over from deployments/tendermint.
THIS IMPLEMENTATION SHOULD NOT BE USED IN TESTS WHERE NORMAL OPERATION OF THE TENDERMINT SERVER APP IS REQUIRED!
"""
override_config_toml()
tendermint_params = TendermintParams(
proxy_app=os.environ["PROXY_APP"],
consensus_create_empty_blocks=os.environ["CREATE_EMPTY_BLOCKS"] == "true",
home=os.environ["TMHOME"],
)
app = Flask(__name__)
period_dumper = PeriodDumper(logger=app.logger, dump_dir=dump_dir)
tendermint_node = TendermintNode(tendermint_params, logger=app.logger)
tendermint_node.start(start_monitoring=perform_monitoring, debug=debug)
@app.route("/hard_reset")
def hard_reset() -> Tuple[Any, int]:
"""Reset the node forcefully, and prune the blocks"""
try:
tendermint_node.stop()
if IS_DEV_MODE:
period_dumper.dump_period()
return_code = tendermint_node.prune_blocks()
if return_code:
tendermint_node.start(start_monitoring=perform_monitoring)
raise RuntimeError("Could not perform `unsafe-reset-all` successfully!")
defaults = get_defaults()
tendermint_node.reset_genesis_file(
request.args.get("genesis_time", defaults["genesis_time"]),
# default should be 1: https://github.com/tendermint/tendermint/pull/5191/files
request.args.get("initial_height", "1"),
request.args.get("period_count", "0"),
)
tendermint_node.start(start_monitoring=perform_monitoring)
# we assume a 5 second delay between the time the tendermint node starts
# and when the agent receives a response; 5 seconds should be enough for
# tendermint to start and perform a Handshake Info request, where the agent
# would respond with a non-zero height, because the agent has not wiped its
# local blockchain. Checkout https://docs.tendermint.com/v0.33/app-dev/app-development.html#handshake
delay = 5
time.sleep(delay)
return jsonify({"message": "Reset successful.", "status": True}), 200
except Exception as e: # pylint: disable=W0703
return jsonify({"message": f"Reset failed: {e}", "status": False}), 200
@app.errorhandler(404)
def METHOD_NAME(e: NotFound) -> Response:
"""Handle server error."""
app.logger.info(e) # pylint: disable=E
return Response("Not Found", status=404, mimetype="application/json")
@app.errorhandler(500)
def handle_server_error(e: InternalServerError) -> Response:
"""Handle server error."""
app.logger.info(e) # pylint: disable=E
return Response("Error Closing Node", status=500, mimetype="application/json")
return app, tendermint_node
def create_server() -> Any:
"""Function to retrieve just the app to be used by flask entry point."""
flask_app, _ = create_app()
return flask_app | null |
206 | #!/usr/bin/python
'''
Copyright (c) 2020, dataJAR Ltd. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither data JAR Ltd nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DATA JAR LTD 'AS IS' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL DATA JAR LTD BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
SUPPORT FOR THIS PROGRAM
This program is distributed 'as is' by DATA JAR LTD.
For more information or support, please utilise the following resources:
http://www.datajar.co.uk
DESCRIPTION
Imports Adobe 2020 titles found in running users ~/Downloads
'''
from __future__ import absolute_import
from __future__ import print_function
import argparse
import glob
import os
import subprocess
import sys
__version__ = '1.1'
def main():
'''Gimme some main'''
adobe_folders = []
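# Collect Adobe*2020 folders found in the running user's Downloads.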
for some_item in os.listdir(DOWNLOADS_PATH):
some_path = os.path.join(DOWNLOADS_PATH, some_item)
if os.path.isdir(some_path):
if some_item.startswith('Adobe') and some_item.endswith('2020'):
adobe_folders.append(some_item)
if not len(adobe_folders):
print('No Adobe*2020 folders found in %s, exiting...' % DOWNLOADS_PATH)
sys.exit(1)
if len(adobe_folders) == 1:
print('1 Adobe 2020 folder found, creating recipe list...')
else:
print('%s Adobe 2020 folders found, creating recipe list...' % len(adobe_folders))
open(ADOBE_LIST, 'w').close()
pkg_checker(adobe_folders)
def pkg_checker(adobe_folders):
''' Check that we have the Install_pkg's & proceed if we do'''
found_pkgs = 0
print('Looking for pkgs...')
for adobe_folder in sorted(adobe_folders):
try:
install_pkg = glob.glob(os.path.join(DOWNLOADS_PATH, adobe_folder, \
'Build', '*_Install.pkg'))[0]
print('Found {0}...'.format(install_pkg))
if os.path.exists(install_pkg):
create_list(adobe_folder)
found_pkgs += 1
else:
print('Cannot find pkg ({0}), for {1}... Skipping...'.format\
(install_pkg, adobe_folder))
except IndexError as err_msg:
print('Skipping {0}, as cannot find Install.pkg: {1}...'.format(adobe_folder, err_msg))
if found_pkgs == 0:
print('No pkgs found, exiting...')
sys.exit(1)
else:
METHOD_NAME()
def create_list(adobe_folder):
''' Create recipe list '''
library_dir = os.path.expanduser('~/Library/')
override_path = os.path.join(library_dir, 'AutoPkg', 'RecipeOverrides', \
adobe_folder + '.' \
+ RECIPE_TYPE + '.recipe')
override_name = 'local.' + RECIPE_TYPE + '.' + adobe_folder
if not os.path.isfile(override_path):
print('Skipping {0}, as cannot find override...'.format(override_path))
return
list_file = open(ADOBE_LIST, 'a+')
list_file.write(override_name + '\n')
list_file.close()
def METHOD_NAME():
'''Run recipe list'''
if os.path.exists(ADOBE_LIST):
print('Running recipe_list: `{0}`'.format(ADOBE_LIST))
print()
cmd_args = ['/usr/local/bin/autopkg', 'run', '-v', '--recipe-list', ADOBE_LIST, \
'--report-plist', REPORT_PATH]
print('Running `{0}`...'.format(cmd_args))
subprocess.call(cmd_args)
else:
print('Recipe list not populated, make sure you have the needed overrides in place....')
if __name__ == '__main__':
# Try to locate autopkg
if not os.path.exists('/usr/local/bin/autopkg'):
print('Cannot find autopkg')
sys.exit(1)
# Parse recipe type argument
PARSER = argparse.ArgumentParser()
PARSER.add_argument('type', type=str, help='Recipe type, either "munki" or "jss"')
ARG_PARSER = PARSER.parse_args()
RECIPE_TYPE = ARG_PARSER.type.lower()
# Constants
DOWNLOADS_PATH = os.path.expanduser('~/Downloads/')
ADOBE_LIST = os.path.join(DOWNLOADS_PATH, 'adobe2020_list.txt')
REPORT_PATH = os.path.join(DOWNLOADS_PATH, 'adobe2020_report.plist')
main() | null |
207 | import requests
from plugin import plugin, require
from colorama import Fore
from bs4 import BeautifulSoup
@require(network=True)
@plugin("food recipe")
def getChoices(jarvis, s):
"""
function gets the choice of the type of cuisine the user wants.
user must get an api key from https://spoonacular.com/food-api/docs#Authentication
THIS WILL NOT WORK WITHOUT THE API KEY.
user has to input the api key every time they run the plugin
"""
user_api_key = jarvis.input(
"Enter spoonacular.com food-api API_KEY (visit https://spoonacular.com/food-api/docs#Authentication) for getting one: ",
Fore.GREEN)
print("--------------------------ENTER CUISINE----------------------------")
print("1. Indian \t\t 7. European \t\t 13. Cajun")
print("2. Asian \t\t 8. German \t\t 14. Middle Eastern")
print("3. British \t\t 9. Korean \t\t 15. Latin American")
print("4. Mexican \t\t 10. Irish \t\t 16. Thai")
print("5. Italian \t\t 11. American \t\t 17. Vietnamese")
print("6. Chinese \t\t 12. Mediterranean \t 18. Jewish")
cuisine_dict = {
"1": "Indian",
"2": "Asian",
"3": "British",
"4": "Mexican",
"5": "Italian",
"6": "Chinese",
"7": "European",
"8": "German",
"9": "Korean",
"10": "Irish",
"11": "American",
"12": "Mediterranean",
"13": "Cajun",
"14": "Middle Eastern",
"15": "Latin American",
"16": "Thai",
"17": "Vietnamese",
"18": "Jewish"
}
cuisine_input = jarvis.input("Enter Cuisine (no. of the cuisine): ", Fore.RED)
# Check if the user's input exists in the dictionary
if cuisine_input in cuisine_dict:
cuisine = cuisine_dict[cuisine_input]
print("Selected cuisine:", cuisine)
else:
print("Invalid input. Please enter a valid choice.")
return
METHOD_NAME(user_api_key, cuisine)
def METHOD_NAME(apiKey, cuisine):
url = f"https://api.spoonacular.com/recipes/complexSearch?apiKey={apiKey}&cuisine={cuisine}&includeNutrition=true"
response = requests.get(url)
# print(response)
if response.status_code == 200:
content = response.json()
# for debugging purposes, need import json
# with open("contentFood.json", "w") as f:
# json.dump(content, f, indent=2)
# extracting titles
titles = []
for item in content["results"]:
titles.append(item["title"])
print("--------------------------FOOD ITEM----------------------------")
for index, title in enumerate(titles, 1):
print(f"{index}. {title}")
# Asking the user to select a title
while True:
try:
selected_index = int(input("Enter the number corresponding to the title you want: "))
print()
if 1 <= selected_index <= len(titles):
break
else:
print(Fore.RED + "Invalid selection. Please enter a valid number.")
except ValueError:
print(Fore.RED + "Invalid input. Please enter a number.")
# Get the ID number for the selected title
selectedTitle = titles[selected_index - 1]
for item in content['results']:
if item['title'] == selectedTitle:
selectedId = item['id']
break
print(f"Selected Title: {selectedTitle}", Fore.CYAN)
# print(f"ID Number for Selected Title: {selected_id}")
# now get the recipe info from the id
url2 = f"https://api.spoonacular.com/recipes/{selectedId}/information?apiKey={apiKey}&includeNutrition=false"
responseRecipeInformation = requests.get(url2)
if responseRecipeInformation.status_code == 200:
content2 = responseRecipeInformation.json()
# debugging purposes again
# with open("recipeInfo.json", "w") as f2:
# json.dump(content2, f2, indent=2)
"""
below code gets the ingredients, summary and description
uses the beautiful soup module for cutting out the html tags.
source code has been outputted for reference
"""
summary_html = content2['summary']
# Cleaning the summary text from HTML tags (cutting out the html tags)
soup = BeautifulSoup(summary_html, 'html.parser')
summary_text = soup.get_text()
source_url = content2['sourceUrl']
description = content2['analyzedInstructions'][0]['steps'][0]['step']
print("Summary:")
print(summary_text)
print("\nDescription:")
print(description)
print("\nSource URL:")
print(source_url)
# Extracting the list of ingredients with their corresponding aisle
ingredients = content2['extendedIngredients']
print("\nIngredients:")
for ingredient in ingredients:
original_value = ingredient['original']
aisle = ingredient['aisle']
print(f"- {original_value} (Aisle: {aisle})", Fore.GREEN)
print("\nHope you enjoy your food!", Fore.BLUE)
else:
print("Network down. Please try again later.")
else:
print("Network down. Please try again later.")
return | null |
208 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
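# Request wrapper for the RDS UpgradeDBInstanceMajorVersion API; each getter/setter maps to one query parameter.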
class UpgradeDBInstanceMajorVersionRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'UpgradeDBInstanceMajorVersion')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DBInstanceStorage(self): # Integer
return self.get_query_params().get('DBInstanceStorage')
def set_DBInstanceStorage(self, DBInstanceStorage): # Integer
self.add_query_param('DBInstanceStorage', DBInstanceStorage)
def get_ZoneIdSlave1(self): # String
return self.get_query_params().get('ZoneIdSlave1')
def set_ZoneIdSlave1(self, ZoneIdSlave1): # String
self.add_query_param('ZoneIdSlave1', ZoneIdSlave1)
def get_ZoneIdSlave2(self): # String
return self.get_query_params().get('ZoneIdSlave2')
def set_ZoneIdSlave2(self, ZoneIdSlave2): # String
self.add_query_param('ZoneIdSlave2', ZoneIdSlave2)
def get_SwitchTimeMode(self): # String
return self.get_query_params().get('SwitchTimeMode')
def set_SwitchTimeMode(self, SwitchTimeMode): # String
self.add_query_param('SwitchTimeMode', SwitchTimeMode)
def get_SwitchOver(self): # String
return self.get_query_params().get('SwitchOver')
def set_SwitchOver(self, SwitchOver): # String
self.add_query_param('SwitchOver', SwitchOver)
def get_CollectStatMode(self): # String
return self.get_query_params().get('CollectStatMode')
def set_CollectStatMode(self, CollectStatMode): # String
self.add_query_param('CollectStatMode', CollectStatMode)
def get_SwitchTime(self): # String
return self.get_query_params().get('SwitchTime')
def set_SwitchTime(self, SwitchTime): # String
self.add_query_param('SwitchTime', SwitchTime)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_DBInstanceStorageType(self): # String
return self.get_query_params().get('DBInstanceStorageType')
def set_DBInstanceStorageType(self, DBInstanceStorageType): # String
self.add_query_param('DBInstanceStorageType', DBInstanceStorageType)
def get_Period(self): # String
return self.get_query_params().get('Period')
def set_Period(self, Period): # String
self.add_query_param('Period', Period)
def get_UsedTime(self): # String
return self.get_query_params().get('UsedTime')
def set_UsedTime(self, UsedTime): # String
self.add_query_param('UsedTime', UsedTime)
def get_DBInstanceClass(self): # String
return self.get_query_params().get('DBInstanceClass')
def set_DBInstanceClass(self, DBInstanceClass): # String
self.add_query_param('DBInstanceClass', DBInstanceClass)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_PrivateIpAddress(self): # String
return self.get_query_params().get('PrivateIpAddress')
def set_PrivateIpAddress(self, PrivateIpAddress): # String
self.add_query_param('PrivateIpAddress', PrivateIpAddress)
def get_VPCId(self): # String
return self.get_query_params().get('VPCId')
def set_VPCId(self, VPCId): # String
self.add_query_param('VPCId', VPCId)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType)
def get_InstanceNetworkType(self): # String
return self.get_query_params().get('InstanceNetworkType')
def METHOD_NAME(self, InstanceNetworkType): # String
self.add_query_param('InstanceNetworkType', InstanceNetworkType)
def get_TargetMajorVersion(self): # String
return self.get_query_params().get('TargetMajorVersion')
def set_TargetMajorVersion(self, TargetMajorVersion): # String
self.add_query_param('TargetMajorVersion', TargetMajorVersion) | null |
209 | # coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Lazy imports for heavy dependencies."""
import functools
import importlib
from typing import Any, Callable, TypeVar
from tensorflow_datasets.core.utils import py_utils as utils
_Fn = TypeVar("_Fn")
def _try_import(module_name):
"""Try importing a module, with an informative error message on failure."""
try:
mod = importlib.import_module(module_name)
return mod
except ImportError as e:
err_msg = (
"Failed importing {name}. This likely means that the dataset "
"requires additional dependencies that have to be "
"manually installed (usually with `pip install {name}`). See "
"setup.py extras_require."
).format(name=module_name)
utils.reraise(e, suffix=err_msg)
class LazyImporter(object):
"""Lazy importer for heavy dependencies.
Some datasets require heavy dependencies for data generation. To allow for
the default installation to remain lean, those heavy dependencies are
lazily imported here.
"""
@utils.classproperty
@classmethod
def apache_beam(cls):
return _try_import("apache_beam")
@utils.classproperty
@classmethod
def METHOD_NAME(cls):
return _try_import("bs4")
@utils.classproperty
@classmethod
def crepe(cls):
return _try_import("crepe")
@utils.classproperty
@classmethod
def cv2(cls):
return _try_import("cv2")
@utils.classproperty
@classmethod
def datasets(cls):
return _try_import("datasets")
@utils.classproperty
@classmethod
def envlogger(cls):
return _try_import("envlogger.reader")
@utils.classproperty
@classmethod
def gcsfs_store(cls):
return _try_import("gcsfs").GCSFileSystem(token='anon').get_mapper
@utils.classproperty
@classmethod
def gcld3(cls):
return _try_import("gcld3") # pylint: disable=unreachable
@utils.classproperty
@classmethod
def h5py(cls):
return _try_import("h5py")
@utils.classproperty
@classmethod
def jax(cls):
return _try_import("jax")
@utils.classproperty
@classmethod
def langdetect(cls):
return _try_import("langdetect")
@utils.classproperty
@classmethod
def librosa(cls):
return _try_import("librosa")
@utils.classproperty
@classmethod
def lxml(cls):
return _try_import("lxml")
@utils.classproperty
@classmethod
def matplotlib(cls):
_try_import("matplotlib.pyplot")
return _try_import("matplotlib")
@utils.classproperty
@classmethod
def mwparserfromhell(cls):
return _try_import("mwparserfromhell")
@utils.classproperty
@classmethod
def mwxml(cls):
return _try_import("mwxml")
@utils.classproperty
@classmethod
def networkx(cls):
return _try_import("networkx")
@utils.classproperty
@classmethod
def nltk(cls):
return _try_import("nltk")
@utils.classproperty
@classmethod
def pandas(cls):
return _try_import("pandas")
@utils.classproperty
@classmethod
def PIL_Image(cls): # pylint: disable=invalid-name
# TiffImagePlugin need to be activated explicitly on some systems
# https://github.com/python-pillow/Pillow/blob/5.4.x/src/PIL/Image.py#L407
_try_import("PIL.TiffImagePlugin")
return _try_import("PIL.Image")
@utils.classproperty
@classmethod
def PIL_ImageDraw(cls): # pylint: disable=invalid-name
return _try_import("PIL.ImageDraw")
@utils.classproperty
@classmethod
def pretty_midi(cls):
return _try_import("pretty_midi")
@utils.classproperty
@classmethod
def pycocotools(cls):
return _try_import("pycocotools.mask")
@utils.classproperty
@classmethod
def pydub(cls):
return _try_import("pydub")
@utils.classproperty
@classmethod
def scipy(cls):
_try_import("scipy.io")
_try_import("scipy.io.wavfile")
_try_import("scipy.ndimage")
return _try_import("scipy")
@utils.classproperty
@classmethod
def skimage(cls):
_try_import("skimage.color")
_try_import("skimage.filters")
try:
_try_import("skimage.external.tifffile")
except ImportError:
pass
return _try_import("skimage")
@utils.classproperty
@classmethod
def tifffile(cls):
return _try_import("tifffile")
@utils.classproperty
@classmethod
def tensorflow_data_validation(cls):
return _try_import("tensorflow_data_validation")
@utils.classproperty
@classmethod
def tensorflow_io(cls):
return _try_import("tensorflow_io")
@utils.classproperty
@classmethod
def tldextract(cls):
return _try_import("tldextract")
@utils.classproperty
@classmethod
def os(cls):
"""For testing purposes only."""
return _try_import("os")
@utils.classproperty
@classmethod
def test_foo(cls):
"""For testing purposes only."""
return _try_import("test_foo")
@utils.classproperty
@classmethod
def zarr(cls):
return _try_import("zarr")
@utils.classproperty
@classmethod
def conllu(cls):
return _try_import("conllu")
@utils.classproperty
@classmethod
def huggingface_hub(cls):
return _try_import("huggingface_hub")
lazy_imports = LazyImporter # pylint: disable=invalid-name
def beam_ptransform_fn(fn: Callable[..., Any]) -> Callable[..., Any]:
"""Lazy version of `@beam.ptransform_fn`."""
lazy_decorated_fn = None
@functools.wraps(fn)
def decorated(*args, **kwargs):
nonlocal lazy_decorated_fn
# Actually decorate the function only the first time it is called
if lazy_decorated_fn is None:
lazy_decorated_fn = lazy_imports.apache_beam.ptransform_fn(fn)
return lazy_decorated_fn(*args, **kwargs)
return decorated | null |
210 | import io
import random
import math
import IMP
import IMP.test
import IMP.atom
import IMP.core
from test_coulomb import place_xyzs
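# Build a two-particle model decorated for Lennard-Jones scoring, with a force-switch smoothing function.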
def make_test_pair_score(min_distance=9.0, max_distance=10.0):
m = IMP.Model()
p0 = m.add_particle("p0")
sph = IMP.algebra.Sphere3D(IMP.algebra.Vector3D(0, 0, 0), 1.0)
IMP.core.XYZR.setup_particle(m, p0, sph)
d0 = IMP.atom.LennardJones.setup_particle(m, p0, 1.0)
p1 = m.add_particle("p1")
IMP.core.XYZR.setup_particle(m, p1, sph)
d1 = IMP.atom.LennardJones.setup_particle(m, p1, 1.0)
sm = IMP.atom.ForceSwitch(min_distance, max_distance)
c = IMP.atom.LennardJonesPairScore(sm)
r = IMP.core.PairRestraint(m, c, (p0, p1))
sf = IMP.core.RestraintsScoringFunction([r])
return m, sf, d0, d1, c
class Tests(IMP.test.TestCase):
"""Test the LennardJonesPairScore"""
def test_get_set(self):
"""Check LennardJonesPairScore get/set methods"""
sm = IMP.atom.ForceSwitch(9.0, 10.0)
c = IMP.atom.LennardJonesPairScore(sm)
self.assertEqual(c.get_repulsive_weight(), 1.0)
c.set_repulsive_weight(5.0)
self.assertEqual(c.get_repulsive_weight(), 5.0)
self.assertEqual(c.get_attractive_weight(), 1.0)
c.set_attractive_weight(10.0)
self.assertEqual(c.get_attractive_weight(), 10.0)
def test_value(self):
"""Check score value of LennardJonesPairScore"""
m, sf, d0, d1, c = make_test_pair_score()
box = IMP.algebra.Vector3D(10.0, 20.0, 30.0)
for r0 in (2.0, 1.0):
d0.set_radius(r0)
for r1 in (2.0, 1.0):
d1.set_radius(r1)
rmin = r0 + r1
for wd0 in (0.0, 1.0, 2.0):
d0.set_well_depth(wd0)
for wd1 in (0.0, 1.0, 2.0):
d1.set_well_depth(wd1)
wd = math.sqrt(wd0 * wd1)
for att in (0.0, 0.5, 1.0):
c.set_attractive_weight(att)
for rep in (0.0, 0.5, 1.0):
c.set_repulsive_weight(rep)
for r in (3.0, 4.0, 5.0):
place_xyzs(d0, d1, box, r)
score = sf.evaluate(False)
expected = wd * (rep * (rmin / r) ** 12
- 2.0 * att * (rmin / r) ** 6)
self.assertAlmostEqual(score, expected,
delta=1e-2)
def METHOD_NAME(self):
"""Check derivatives of LennardJonesPairScore"""
m, sf, d0, d1, c = make_test_pair_score(4.0, 6.0)
# Place one particle at the origin and the other at a random position
# between 1 and 6 angstroms away (not too close since the derivatives
# are too large there)
d0.set_coordinates(IMP.algebra.Vector3D(0, 0, 0))
d1.set_coordinates(IMP.algebra.get_random_vector_on(IMP.algebra.get_unit_sphere_3d())
* (random.random() * 5.0 + 1.0))
self.assertXYZDerivativesInTolerance(sf, d0, 2.0, 5.0)
self.assertXYZDerivativesInTolerance(sf, d1, 2.0, 5.0)
def test_smoothing(self):
"""Check smoothing of LennardJonesPairScore"""
m, sf, d0, d1, c = make_test_pair_score()
smm, smsf, smd0, smd1, smc = make_test_pair_score(min_distance=4.0,
max_distance=5.0)
box = IMP.algebra.Vector3D(10.0, 20.0, 30.0)
def place_all(dist):
place_xyzs(d0, d1, box, dist)
smd0.set_coordinates(d0.get_coordinates())
smd1.set_coordinates(d1.get_coordinates())
# For dist <= min_distance, scores should be identical
for dist in (3.0, 3.5, 4.0):
place_all(dist)
self.assertAlmostEqual(sf.evaluate(False), smsf.evaluate(False),
delta=1e-6)
# For dist > max_distance, smoothed score should be zero
place_all(5.5)
self.assertEqual(smsf.evaluate(False), 0.0)
self.assertNotEqual(sf.evaluate(False), 0.0)
if __name__ == '__main__':
IMP.test.main() | null |
211 | #
# Copyright (c) 2018-2020 Red Hat, Inc.
#
# This file is part of nmstate
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import logging
import os
import subprocess
import tempfile
import pytest
import libnmstate
from libnmstate.schema import DNS
from libnmstate.schema import Route
from libnmstate.schema import RouteRule
from .testlib import ifacelib
REPORT_HEADER = """RPMs: {rpms}
OS: {osname}
nmstate: {nmstate_version}
"""
def pytest_configure(config):
config.addinivalue_line("markers", "slow: mark time consuming test")
config.addinivalue_line("markers", "tier2")
config.addinivalue_line("markers", "tier1")
def pytest_addoption(parser):
parser.addoption(
"--runslow", action="store_true", default=False, help="run slow tests"
)
def METHOD_NAME(config, items):
if not config.getoption("--runslow"):
# --runslow is not in cli: skip slow tests
_mark_skip_slow_tests(items)
_mark_tier2_tests(items)
def _mark_skip_slow_tests(items):
skip_slow = pytest.mark.skip(reason="need --runslow option to run")
for item in items:
if "slow" in item.keywords:
item.add_marker(skip_slow)
def _mark_tier2_tests(items):
for item in items:
if "tier1" not in item.keywords:
item.add_marker(pytest.mark.tier2)
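# Net effect of the two helpers above: a plain `pytest` run skips tests marked
# `slow` unless --runslow is passed, and every test not marked `tier1` is
# implicitly marked `tier2`, so `-m tier1` / `-m tier2` partition the suite.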
@pytest.fixture(scope="session", autouse=True)
def fix_ip_netns_issue():
if os.getenv("CI"):
with tempfile.TemporaryDirectory() as tmpdirname:
subprocess.run(
f"mount -t sysfs --make-private {tmpdirname}".split(),
check=True,
)
yield
subprocess.run(f"umount {tmpdirname}".split())
else:
yield
@pytest.fixture(scope="session", autouse=True)
def test_env_setup():
_logging_setup()
old_state = libnmstate.show()
old_state = _remove_interfaces_from_env(old_state)
_remove_dns_route_route_rule()
_ethx_init()
yield
restore_old_state(old_state)
def _remove_dns_route_route_rule():
"""
    Remove existing DNS, routes and route rules in case they interfere with tests.
"""
libnmstate.apply(
{
DNS.KEY: {DNS.CONFIG: {}},
Route.KEY: {
Route.CONFIG: [{Route.STATE: Route.STATE_ABSENT}],
},
RouteRule.KEY: {RouteRule.CONFIG: []},
},
verify_change=False,
)
def _logging_setup():
logging.basicConfig(
format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
level=logging.DEBUG,
)
def _ethx_init():
"""Remove any existing definitions on the ethX interfaces."""
ifacelib.ifaces_init("eth1", "eth2")
def _remove_interfaces_from_env(state):
"""
Remove references from interfaces passed to environment variable
NMSTATE_TEST_IGNORE_IFACE.
"""
ignore_iface = os.getenv("NMSTATE_TEST_IGNORE_IFACE")
if ignore_iface is None:
return state
state["interfaces"] = [
i for i in state["interfaces"] if ignore_iface not in i["name"]
]
state["routes"]["config"] = [
r
for r in state["routes"]["config"]
if ignore_iface not in r["next-hop-interface"]
]
return state
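# Illustration (the interface name is hypothetical): running the suite with
#   NMSTATE_TEST_IGNORE_IFACE=eth3
# drops every interface whose name contains "eth3", and every configured route
# whose next-hop interface contains it, from the saved state, so the restore
# step never touches that interface.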
@pytest.fixture(scope="function")
def eth1_up():
with ifacelib.iface_up("eth1") as ifstate:
yield ifstate
@pytest.fixture(scope="function")
def eth2_up():
with ifacelib.iface_up("eth2") as ifstate:
yield ifstate
port0_up = eth1_up
port1_up = eth2_up
def pytest_report_header(config):
return REPORT_HEADER.format(
rpms=_get_package_nvr("NetworkManager"),
osname=_get_osname(),
nmstate_version=_get_nmstate_version(),
)
def _get_nmstate_version():
"""
Prefer RPM version of nmstate, if not found, use libnmstate module version
"""
try:
return _get_package_nvr("nmstate")
except subprocess.CalledProcessError:
return libnmstate.__version__
def _get_package_nvr(package):
return (
subprocess.check_output(["rpm", "-q", package]).strip().decode("utf-8")
)
def _get_osname():
with open("/etc/os-release") as os_release:
for line in os_release.readlines():
if line.startswith("PRETTY_NAME="):
return line.split("=", maxsplit=1)[1].strip().strip('"')
return ""
# Only restore interfaces that carry the IPv4/IPv6 default gateway, and only
# their IP/DNS configuration; a test machine is expected to lose the rest.
def restore_old_state(old_state):
gw_routes = [
rt
for rt in old_state["routes"].get("config", [])
if rt["destination"] in ("0.0.0.0/0", "::/0")
]
gw_ifaces = [rt["next-hop-interface"] for rt in gw_routes]
desire_state = {
"interfaces": [],
"routes": {"config": gw_routes},
"dns-resolver": old_state.get("dns-resolver", {}),
}
    for iface in old_state["interfaces"]:
        if iface["name"] in gw_ifaces and iface["state"] == "up":
            desire_state["interfaces"].append(
                {
                    "name": iface["name"],
                    "type": iface["type"],
                    "ipv4": iface["ipv4"],
                    "ipv6": iface["ipv6"],
                }
            )
    if desire_state["interfaces"]:
libnmstate.apply(desire_state, verify_change=False) | null |
212 | import json
from bs4 import BeautifulSoup
from puppetboard import app
from test import MockDbQuery
def test_radiator_view(client, mocker,
mock_puppetdb_environments,
mock_puppetdb_default_nodes):
query_data = {
'nodes': [[{'count': 10}]],
'resources': [[{'count': 40}]],
}
dbquery = MockDbQuery(query_data)
mocker.patch.object(app.puppetdb, '_query', side_effect=dbquery.get)
rv = client.get('/radiator')
assert rv.status_code == 200
soup = BeautifulSoup(rv.data, 'html.parser')
assert soup.title.contents[0] == 'Puppetboard'
assert soup.h1 != 'Not Found'
total = soup.find(class_='total')
assert '10' in total.text
def test_radiator_view_all(client, mocker,
mock_puppetdb_environments,
mock_puppetdb_default_nodes):
# starting with v6.9.1 they changed the metric API to v2
# and a totally different format
base_str = 'puppetlabs.puppetdb.population:'
query_data = {
'version': [{'version': '6.9.1'}],
'metrics': [
{
'validate': {
'data': {
'value': {'Value': '50'}
},
'checks': {
'path': '%sname=num-nodes' % base_str
}
}
},
{
'validate': {
'data': {
'value': {'Value': '60'}
},
'checks': {
'path': '%sname=num-resources' % base_str
}
}
},
{
'validate': {
'data': {
'value': {'Value': 60.3}
},
'checks': {
'path': '%sname=avg-resources-per-node' % base_str
}
}
}
]
}
dbquery = MockDbQuery(query_data)
mocker.patch.object(app.puppetdb, '_query', side_effect=dbquery.get)
rv = client.get('/%2A/radiator')
assert rv.status_code == 200
soup = BeautifulSoup(rv.data, 'html.parser')
assert soup.title.contents[0] == 'Puppetboard'
assert soup.h1 != 'Not Found'
total = soup.find(class_='total')
assert '50' in total.text
def test_radiator_view_json(client, mocker,
mock_puppetdb_environments,
mock_puppetdb_default_nodes):
query_data = {
'nodes': [[{'count': 10}]],
'resources': [[{'count': 40}]],
}
dbquery = MockDbQuery(query_data)
mocker.patch.object(app.puppetdb, '_query', side_effect=dbquery.get)
rv = client.get('/radiator', headers={'Accept': 'application/json'})
assert rv.status_code == 200
json_data = json.loads(rv.data.decode('utf-8'))
assert json_data['unreported'] == 1
assert json_data['noop'] == 1
assert json_data['failed'] == 1
assert json_data['changed'] == 1
assert json_data['unchanged'] == 1
def METHOD_NAME(client, mocker,
mock_puppetdb_environments,
mock_puppetdb_default_nodes):
query_data = {
'nodes': [[{'count': 10}]],
'resources': [[{'count': 40}]],
}
dbquery = MockDbQuery(query_data)
mocker.patch.object(app.puppetdb, '_query', side_effect=dbquery.get)
rv = client.get('/nothere/radiator')
assert rv.status_code == 404
soup = BeautifulSoup(rv.data, 'html.parser')
assert soup.title.contents[0] == 'Puppetboard'
assert soup.h1.text == 'Not Found'
def test_radiator_view_division_by_zero(client, mocker,
mock_puppetdb_environments,
mock_puppetdb_default_nodes):
query_data = {
'nodes': [[{'count': 0}]],
'resources': [[{'count': 40}]],
}
dbquery = MockDbQuery(query_data)
mocker.patch.object(app.puppetdb, '_query', side_effect=dbquery.get)
rv = client.get('/radiator')
assert rv.status_code == 200
soup = BeautifulSoup(rv.data, 'html.parser')
assert soup.title.contents[0] == 'Puppetboard'
total = soup.find(class_='total')
assert '0' in total.text | null |
213 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class UpdatePrivateAccessPolicyRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'csas', '2023-01-20', 'UpdatePrivateAccessPolicy')
self.set_method('POST')
def get_Description(self): # String
return self.get_body_params().get('Description')
def set_Description(self, Description): # String
self.add_body_params('Description', Description)
def get_PolicyId(self): # String
return self.get_body_params().get('PolicyId')
def set_PolicyId(self, PolicyId): # String
self.add_body_params('PolicyId', PolicyId)
def get_CustomUserAttributes(self): # Array
return self.get_body_params().get('CustomUserAttributes')
def set_CustomUserAttributes(self, CustomUserAttributes): # Array
for index1, value1 in enumerate(CustomUserAttributes):
if value1.get('UserGroupType') is not None:
self.add_body_params('CustomUserAttributes.' + str(index1 + 1) + '.UserGroupType', value1.get('UserGroupType'))
if value1.get('IdpId') is not None:
self.add_body_params('CustomUserAttributes.' + str(index1 + 1) + '.IdpId', value1.get('IdpId'))
if value1.get('Value') is not None:
self.add_body_params('CustomUserAttributes.' + str(index1 + 1) + '.Value', value1.get('Value'))
if value1.get('Relation') is not None:
self.add_body_params('CustomUserAttributes.' + str(index1 + 1) + '.Relation', value1.get('Relation'))
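    # Sketch of the flattening done above (the values are made up): calling
    #   set_CustomUserAttributes([{'IdpId': 12, 'Value': 'dev'}])
    # adds the body parameters
    #   CustomUserAttributes.1.IdpId = 12
    #   CustomUserAttributes.1.Value = 'dev'
    # i.e. repeated structures become 1-based dotted keys, not JSON arrays.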
def get_TagIds(self): # Array
return self.get_body_params().get('TagIds')
def set_TagIds(self, TagIds): # Array
for index1, value1 in enumerate(TagIds):
self.add_body_params('TagIds.' + str(index1 + 1), value1)
def get_UserGroupIds(self): # Array
return self.get_body_params().get('UserGroupIds')
def set_UserGroupIds(self, UserGroupIds): # Array
for index1, value1 in enumerate(UserGroupIds):
self.add_body_params('UserGroupIds.' + str(index1 + 1), value1)
def METHOD_NAME(self): # String
return self.get_body_params().get('PolicyAction')
def set_PolicyAction(self, PolicyAction): # String
self.add_body_params('PolicyAction', PolicyAction)
def get_Priority(self): # Integer
return self.get_body_params().get('Priority')
def set_Priority(self, Priority): # Integer
self.add_body_params('Priority', Priority)
def get_ApplicationIds(self): # Array
return self.get_body_params().get('ApplicationIds')
def set_ApplicationIds(self, ApplicationIds): # Array
for index1, value1 in enumerate(ApplicationIds):
self.add_body_params('ApplicationIds.' + str(index1 + 1), value1)
def get_UserGroupMode(self): # String
return self.get_body_params().get('UserGroupMode')
def set_UserGroupMode(self, UserGroupMode): # String
self.add_body_params('UserGroupMode', UserGroupMode)
def get_ModifyType(self): # String
return self.get_body_params().get('ModifyType')
def set_ModifyType(self, ModifyType): # String
self.add_body_params('ModifyType', ModifyType)
def get_ApplicationType(self): # String
return self.get_body_params().get('ApplicationType')
def set_ApplicationType(self, ApplicationType): # String
self.add_body_params('ApplicationType', ApplicationType)
def get_Status(self): # String
return self.get_body_params().get('Status')
def set_Status(self, Status): # String
self.add_body_params('Status', Status) | null |
214 | """Datumaro Helper."""
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
# pylint: disable=invalid-name
import os
from typing import List, Optional, Tuple, Union
import datumaro
from datumaro.components.dataset import Dataset, DatasetSubset
from datumaro.components.dataset_base import DatasetItem
from datumaro.plugins.splitter import Split
class DatasetManager:
"""The aim of DatasetManager is support datumaro functions at easy use.
All kind of functions implemented in Datumaro are supported by this Manager.
Since DatasetManager just wraps Datumaro's function,
All methods are implemented as static method.
"""
@staticmethod
def get_train_dataset(dataset: Dataset) -> DatasetSubset:
"""Returns train dataset."""
subsets = dataset.subsets()
train_dataset = subsets.get("train", None)
if train_dataset is not None:
return train_dataset
for k, v in subsets.items():
if "train" in k or "default" in k:
return v
raise ValueError("Can't find training data.")
@staticmethod
def get_val_dataset(dataset: Dataset) -> Union[DatasetSubset, None]:
"""Returns validation dataset."""
subsets = dataset.subsets()
val_dataset = subsets.get("val", None)
if val_dataset is not None:
return val_dataset
for k, v in subsets.items():
if "val" in k:
return v
return None
@staticmethod
def get_data_format(data_root: str) -> str:
"""Find the format of dataset."""
data_root = os.path.abspath(data_root)
data_format: str = ""
# TODO #
        # Currently, the `if/else` statements below are mandatory
        # because Datumaro can't detect the multi-cvat and mvtec formats.
        # After the upgrade of Datumaro, the code below will be changed.
if DatasetManager.is_cvat_format(data_root):
data_format = "multi-cvat"
elif DatasetManager.is_mvtec_format(data_root):
data_format = "mvtec"
else:
data_formats = datumaro.Environment().detect_dataset(data_root)
# TODO: how to avoid hard-coded part
data_format = data_formats[0] if "imagenet" not in data_formats else "imagenet"
print(f"[*] Detected dataset format: {data_format}")
return data_format
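    # Minimal usage sketch (the path is hypothetical):
    #   fmt = DatasetManager.get_data_format("/data/my_dataset")
    #   dataset = DatasetManager.import_dataset("/data/my_dataset", fmt)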
@staticmethod
def get_image_path(data_item: DatasetItem) -> Optional[str]:
"""Returns the path of image."""
if hasattr(data_item.media, "path"):
return data_item.media.path
return None
@staticmethod
def METHOD_NAME(dataset: Dataset, output_dir: str, data_format: str, save_media=True):
"""Export the Datumaro Dataset."""
return dataset.export(output_dir, data_format, save_media=save_media)
@staticmethod
    def import_dataset(data_root: str, data_format: str, subset: Optional[str] = None) -> Dataset:
"""Import dataset."""
return Dataset.import_from(data_root, format=data_format, subset=subset)
@staticmethod
def auto_split(task: str, dataset: Dataset, split_ratio: List[Tuple[str, float]]) -> dict:
"""Automatically split the dataset: train --> train/val."""
splitter = Split(dataset, task.lower(), split_ratio)
return splitter.subsets()
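    # For example, a hypothetical 80/20 train/val split for a classification
    # task would be requested as:
    #   subsets = DatasetManager.auto_split(
    #       "classification", dataset, [("train", 0.8), ("val", 0.2)]
    #   )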
@staticmethod
def is_cvat_format(path: str) -> bool:
"""Detect whether data path is CVAT format or not.
Currently, we used multi-video CVAT format for Action tasks.
This function can detect the multi-video CVAT format.
Multi-video CVAT format
root
|--video_0
|--images
|--frame0001.png
|--annotations.xml
|--video_1
|--video_2
will be deprecated soon.
"""
cvat_format = sorted(["images", "annotations.xml"])
for sub_folder in os.listdir(path):
# video_0, video_1, ...
sub_folder_path = os.path.join(path, sub_folder)
# files must be same with cvat_format
if os.path.isdir(sub_folder_path):
files = sorted(os.listdir(sub_folder_path))
if files != cvat_format:
return False
return True
@staticmethod
def is_mvtec_format(path: str) -> bool:
"""Detect whether data path is MVTec format or not.
Check the first-level architecture folder, to know whether the dataset is MVTec or not.
MVTec default structure like as below:
root
|--ground_truth
|--train
|--test
will be deprecated soon.
"""
mvtec_format = sorted(["ground_truth", "train", "test"])
folder_list = []
for sub_folder in os.listdir(path):
sub_folder_path = os.path.join(path, sub_folder)
# only use the folder name.
if os.path.isdir(sub_folder_path):
folder_list.append(sub_folder)
return sorted(folder_list) == mvtec_format | null |
215 | import random
from base_test import ArkoudaTest
from context import arkouda as ak
from arkouda import client_dtypes
class ClientDTypeTests(ArkoudaTest):
"""
    Note: BitVector operations are not tested here because the class is
    only a display wrapper around a pdarray.
The class does not actually store values as bit-values,
it only converts to a bit representation for display.
Thus, pdarray testing covers these operations.
"""
def test_BitVector_creation(self):
arr = ak.arange(4)
bv = ak.BitVector(arr, width=3)
self.assertIsInstance(bv, client_dtypes.BitVector)
self.assertListEqual(bv.to_list(), ["...", "..|", ".|.", ".||"])
self.assertEqual(bv.dtype, ak.bitType)
# Test reversed
arr = ak.arange(4, dtype=ak.uint64) # Also test with uint64 input
bv = ak.BitVector(arr, width=3, reverse=True)
self.assertIsInstance(bv, client_dtypes.BitVector)
self.assertListEqual(bv.to_list(), ["...", "|..", ".|.", "||."])
self.assertEqual(bv.dtype, ak.bitType)
# test use of vectorizer function
arr = ak.arange(4)
bvectorizer = ak.BitVectorizer(3)
bv = bvectorizer(arr)
self.assertIsInstance(bv, client_dtypes.BitVector)
self.assertListEqual(bv.to_list(), ["...", "..|", ".|.", ".||"])
self.assertEqual(bv.dtype, ak.bitType)
# fail on argument types
with self.assertRaises(TypeError):
bv = ak.BitVector(17, width=4)
arr = ak.array([1.1, 8.3])
with self.assertRaises(TypeError):
            bv = ak.BitVector(arr)
def test_Field_creation(self):
values = ak.arange(4)
names = ["8", "4", "2", "1"]
f = ak.Fields(values, names)
self.assertIsInstance(f, ak.Fields)
self.assertListEqual(f.to_list(), ["---- (0)", "---1 (1)", "--2- (2)", "--21 (3)"])
self.assertEqual(f.dtype, ak.bitType)
# Named fields with reversed bit order
values = ak.array([0, 1, 5, 8, 12])
names = ["Bit1", "Bit2", "Bit3", "Bit4"]
f = ak.Fields(values, names, MSB_left=False, separator="//")
expected = [
"----//----//----//----// (0)",
"Bit1//----//----//----// (1)",
"Bit1//----//Bit3//----// (5)",
"----//----//----//Bit4// (8)",
"----//----//Bit3//Bit4// (12)",
]
self.assertListEqual(f.to_list(), expected)
values = ak.arange(8, dtype=ak.uint64)
names = [f"Bit{x}" for x in range(65)]
with self.assertRaises(ValueError):
f = ak.Fields(values, names)
names = ["t", "t"]
with self.assertRaises(ValueError):
f = ak.Fields(values, names)
names = ["t", ""]
with self.assertRaises(ValueError):
f = ak.Fields(values, names)
names = ["abc", "123"]
with self.assertRaises(ValueError):
f = ak.Fields(values, names, separator="abc")
with self.assertRaises(ValueError):
f = ak.Fields(values, names)
with self.assertRaises(ValueError):
f = ak.Fields(values, names, pad="|!~", separator="//")
def test_ipv4_creation(self):
# Test handling of int64 input
ip_list = ak.array([3232235777], dtype=ak.int64)
ipv4 = ak.IPv4(ip_list)
self.assertIsInstance(ipv4, ak.IPv4)
self.assertListEqual(ipv4.to_list(), ["192.168.1.1"])
self.assertEqual(ipv4.dtype, ak.bitType)
# Test handling of uint64 input
ip_list = ak.array([3232235777], dtype=ak.uint64)
ipv4 = ak.IPv4(ip_list)
self.assertIsInstance(ipv4, ak.IPv4)
self.assertListEqual(ipv4.to_list(), ["192.168.1.1"])
self.assertEqual(ipv4.dtype, ak.bitType)
with self.assertRaises(TypeError):
ipv4 = ak.IPv4("3232235777")
with self.assertRaises(TypeError):
ipv4 = ak.IPv4(ak.array([3232235777.177]))
# Test handling of python dotted-quad input
ipv4 = ak.ip_address(["192.168.1.1"])
self.assertIsInstance(ipv4, ak.IPv4)
self.assertListEqual(ipv4.to_list(), ["192.168.1.1"])
self.assertEqual(ipv4.dtype, ak.bitType)
def METHOD_NAME(self):
ip_list = ak.array([3232235777])
ipv4 = ak.IPv4(ip_list)
ip_as_int = ipv4.normalize("192.168.1.1")
self.assertEqual(3232235777, ip_as_int)
def test_is_ipv4(self):
x = [random.getrandbits(32) for i in range(100)]
ans = ak.is_ipv4(ak.array(x, dtype=ak.uint64))
self.assertListEqual(ans.to_list(), [True] * 100)
ipv4 = ak.IPv4(ak.array(x))
self.assertListEqual(ak.is_ipv4(ipv4).to_list(), [True] * 100)
x = [random.getrandbits(64) if i < 5 else random.getrandbits(32) for i in range(10)]
ans = ak.is_ipv4(ak.array(x, ak.uint64))
self.assertListEqual(ans.to_list(), [i >= 5 for i in range(10)])
with self.assertRaises(TypeError):
ak.is_ipv4(ak.array(x, ak.float64))
with self.assertRaises(RuntimeError):
ak.is_ipv4(ak.array(x, dtype=ak.uint64), ak.arange(2, dtype=ak.uint64))
def test_is_ipv6(self):
x = [random.getrandbits(128) for i in range(100)]
low = ak.array([i & (2**64 - 1) for i in x], dtype=ak.uint64)
high = ak.array([i >> 64 for i in x], dtype=ak.uint64)
self.assertListEqual(ak.is_ipv6(high, low).to_list(), [True] * 100)
x = [random.getrandbits(64) if i < 5 else random.getrandbits(32) for i in range(10)]
ans = ak.is_ipv6(ak.array(x, ak.uint64))
self.assertListEqual(ans.to_list(), [i < 5 for i in range(10)])
with self.assertRaises(TypeError):
ak.is_ipv6(ak.array(x, ak.float64))
with self.assertRaises(RuntimeError):
ak.is_ipv6(ak.cast(ak.array(x), ak.int64), ak.cast(ak.arange(2), ak.int64)) | null |
216 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdts.endpoint import endpoint_data
class DescribeDtsJobsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dts', '2020-01-01', 'DescribeDtsJobs','dts')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_WithoutDbList(self): # Boolean
return self.get_query_params().get('WithoutDbList')
def set_WithoutDbList(self, WithoutDbList): # Boolean
self.add_query_param('WithoutDbList', WithoutDbList)
def get_OrderDirection(self): # String
return self.get_query_params().get('OrderDirection')
def set_OrderDirection(self, OrderDirection): # String
self.add_query_param('OrderDirection', OrderDirection)
def get_DedicatedClusterId(self): # String
return self.get_query_params().get('DedicatedClusterId')
def METHOD_NAME(self, DedicatedClusterId): # String
self.add_query_param('DedicatedClusterId', DedicatedClusterId)
def get_Type(self): # String
return self.get_query_params().get('Type')
def set_Type(self, Type): # String
self.add_query_param('Type', Type)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_OrderColumn(self): # String
return self.get_query_params().get('OrderColumn')
def set_OrderColumn(self, OrderColumn): # String
self.add_query_param('OrderColumn', OrderColumn)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_DtsBisLabel(self): # String
return self.get_query_params().get('DtsBisLabel')
def set_DtsBisLabel(self, DtsBisLabel): # String
self.add_query_param('DtsBisLabel', DtsBisLabel)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_DtsJobId(self): # String
return self.get_query_params().get('DtsJobId')
def set_DtsJobId(self, DtsJobId): # String
self.add_query_param('DtsJobId', DtsJobId)
def get_GroupId(self): # String
return self.get_query_params().get('GroupId')
def set_GroupId(self, GroupId): # String
self.add_query_param('GroupId', GroupId)
def get_Params(self): # String
return self.get_query_params().get('Params')
def set_Params(self, Params): # String
self.add_query_param('Params', Params)
def get_OwnerId(self): # String
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # String
self.add_query_param('OwnerId', OwnerId)
def get_JobType(self): # String
return self.get_query_params().get('JobType')
def set_JobType(self, JobType): # String
self.add_query_param('JobType', JobType)
def get_Tags(self): # String
return self.get_query_params().get('Tags')
def set_Tags(self, Tags): # String
self.add_query_param('Tags', Tags)
def get_Region(self): # String
return self.get_query_params().get('Region')
def set_Region(self, Region): # String
self.add_query_param('Region', Region)
def get_DtsInstanceId(self): # String
return self.get_query_params().get('DtsInstanceId')
def set_DtsInstanceId(self, DtsInstanceId): # String
self.add_query_param('DtsInstanceId', DtsInstanceId)
def get_Status(self): # String
return self.get_query_params().get('Status')
def set_Status(self, Status): # String
self.add_query_param('Status', Status) | null |
217 | # Copyright (c) ZenML GmbH 2022. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
from pathlib import Path
from zenml.client import Client
from zenml.config import DockerSettings
from zenml.integrations.sklearn import SKLEARN, SklearnIntegration
from zenml.utils.pipeline_docker_image_builder import (
PipelineDockerImageBuilder,
)
def METHOD_NAME():
"""Tests the setting of the user if configured."""
docker_settings = DockerSettings(user=None)
generated_dockerfile = (
PipelineDockerImageBuilder._generate_zenml_pipeline_dockerfile(
"image:tag",
docker_settings,
download_files=False,
)
)
assert "USER" not in generated_dockerfile
docker_settings = DockerSettings(user="test_user")
generated_dockerfile = (
PipelineDockerImageBuilder._generate_zenml_pipeline_dockerfile(
"image:tag",
docker_settings,
download_files=False,
)
)
assert "USER test_user" in generated_dockerfile
def test_requirements_file_generation(
mocker, local_stack, tmp_path: Path, sample_hub_plugin_response_model
):
"""Tests that the requirements get included in the correct order and only when configured."""
mocker.patch("subprocess.check_output", return_value=b"local_requirements")
mocker.patch.object(
local_stack, "requirements", return_value={"stack_requirements"}
)
mocker.patch(
"zenml._hub.client.HubClient.get_plugin",
return_value=sample_hub_plugin_response_model,
)
# just local requirements
settings = DockerSettings(
install_stack_requirements=False,
requirements=None,
required_integrations=[],
replicate_local_python_environment="pip_freeze",
)
files = PipelineDockerImageBuilder.gather_requirements_files(
settings, stack=local_stack
)
assert len(files) == 1
assert files[0][1] == "local_requirements"
# just stack requirements
settings = DockerSettings(
install_stack_requirements=True,
requirements=None,
required_integrations=[],
replicate_local_python_environment=None,
)
files = PipelineDockerImageBuilder.gather_requirements_files(
settings, stack=local_stack
)
assert len(files) == 1
assert files[0][1] == "stack_requirements"
# just user requirements
settings = DockerSettings(
install_stack_requirements=False,
requirements=["user_requirements"],
required_integrations=[],
replicate_local_python_environment=None,
)
files = PipelineDockerImageBuilder.gather_requirements_files(
settings, stack=local_stack
)
assert len(files) == 1
assert files[0][1] == "user_requirements"
# all values set
requirements_file = tmp_path / "requirements.txt"
requirements_file.write_text("user_requirements")
settings = DockerSettings(
install_stack_requirements=True,
requirements=str(requirements_file),
required_integrations=[SKLEARN],
required_hub_plugins=[sample_hub_plugin_response_model.name],
replicate_local_python_environment="pip_freeze",
)
files = PipelineDockerImageBuilder.gather_requirements_files(
settings, stack=local_stack
)
assert len(files) == 5
# first up the local python requirements
assert files[0][1] == "local_requirements"
# then the user requirements
assert files[1][1] == "user_requirements"
# then the integration requirements
expected_integration_requirements = "\n".join(
sorted(SklearnIntegration.REQUIREMENTS + ["stack_requirements"])
)
assert files[2][1] == expected_integration_requirements
# last the hub requirements
expected_hub_internal_requirements = (
f"-i {sample_hub_plugin_response_model.index_url}\n"
f"{sample_hub_plugin_response_model.package_name}"
)
assert files[3][1] == expected_hub_internal_requirements
expected_hub_pypi_requirements = "\n".join(
sample_hub_plugin_response_model.requirements
)
assert files[4][1] == expected_hub_pypi_requirements
def test_build_skipping():
"""Tests that the parent image is returned directly if `skip_build` is set
to `True`."""
settings = DockerSettings(skip_build=True, parent_image="my_parent_image")
image_digest, _, _ = PipelineDockerImageBuilder().build_docker_image(
docker_settings=settings,
tag="tag",
stack=Client().active_stack,
include_files=True,
download_files=False,
)
assert image_digest | null |
218 | # coding=utf-8
# Copyright 2018-2023 EvaDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
import pandas
from evadb.database import EvaDBDatabase
from evadb.interfaces.relational.utils import (
create_limit_expression,
create_star_expression,
handle_select_clause,
sql_predicate_to_expresssion_tree,
sql_string_to_expresssion_list,
string_to_lateral_join,
try_binding,
)
from evadb.models.storage.batch import Batch
from evadb.parser.alias import Alias
from evadb.parser.select_statement import SelectStatement
from evadb.parser.statement import AbstractStatement
from evadb.parser.table_ref import JoinNode, TableRef
from evadb.parser.types import JoinType
from evadb.parser.utils import parse_sql_orderby_expr
from evadb.server.command_handler import execute_statement
class EvaDBQuery:
def __init__(
self,
evadb: EvaDBDatabase,
query_node: Union[AbstractStatement, TableRef],
alias: Alias = None,
):
self._evadb = evadb
self._query_node = query_node
self._alias = alias
def alias(self, alias: str) -> "EvaDBQuery":
"""Returns a new Relation with an alias set.
Args:
alias (str): an alias name to be set for the Relation.
Returns:
EvaDBQuery: Aliased Relation.
Examples:
>>> relation = cursor.table("sample_table")
>>> relation.alias('table')
"""
        self._alias = Alias(alias)
        return self
def cross_apply(self, expr: str, alias: str) -> "EvaDBQuery":
"""Execute a expr on all the rows of the relation
Args:
expr (str): sql expression
alias (str): alias of the output of the expr
Returns:
`EvaDBQuery`: relation
Examples:
Runs Yolo on all the frames of the input table
>>> relation = cursor.table("videos")
>>> relation.cross_apply("Yolo(data)", "objs(labels, bboxes, scores)")
Runs Yolo on all the frames of the input table and unnest each object as separate row.
>>> relation.cross_apply("unnest(Yolo(data))", "obj(label, bbox, score)")
"""
assert self._query_node.from_table is not None
table_ref = string_to_lateral_join(expr, alias=alias)
join_table = TableRef(
JoinNode(
TableRef(self._query_node, alias=self._alias),
table_ref,
join_type=JoinType.LATERAL_JOIN,
)
)
self._query_node = SelectStatement(
target_list=create_star_expression(), from_table=join_table
)
# reset the alias as after join there isn't a single alias
self._alias = Alias("Relation")
try_binding(self._evadb.catalog, self._query_node)
return self
def df(self) -> pandas.DataFrame:
"""Execute and fetch all rows as a pandas DataFrame
Returns:
pandas.DataFrame:
"""
batch = self.execute()
assert batch is not None, "relation execute failed"
return batch.frames
def execute(self) -> Batch:
"""Transform the relation into a result set
Returns:
Batch: result as evadb Batch
"""
result = execute_statement(self._evadb, self._query_node.copy())
assert result.frames is not None
return result
def filter(self, expr: str) -> "EvaDBQuery":
"""
Filters rows using the given condition. Multiple filters can be chained using `AND`
Parameters:
expr (str): The filter expression.
Returns:
EvaDBQuery : Filtered EvaDBQuery.
Examples:
>>> relation = cursor.table("sample_table")
>>> relation.filter("col1 > 10")
Filter by sql string
>>> relation.filter("col1 > 10 AND col1 < 20")
"""
parsed_expr = sql_predicate_to_expresssion_tree(expr)
self._query_node = handle_select_clause(
self._query_node, self._alias, "where_clause", parsed_expr
)
try_binding(self._evadb.catalog, self._query_node)
return self
def limit(self, num: int) -> "EvaDBQuery":
"""Limits the result count to the number specified.
Args:
num (int): Number of records to return. Will return num records or all records if the Relation contains fewer records.
Returns:
EvaDBQuery: Relation with subset of records
Examples:
>>> relation = cursor.table("sample_table")
>>> relation.limit(10)
"""
limit_expr = create_limit_expression(num)
self._query_node = handle_select_clause(
self._query_node, self._alias, "limit_count", limit_expr
)
try_binding(self._evadb.catalog, self._query_node)
return self
def order(self, order_expr: str) -> "EvaDBQuery":
"""Reorder the relation based on the order_expr
Args:
order_expr (str): sql expression to order the relation
Returns:
EvaDBQuery: A EvaDBQuery ordered based on the order_expr.
Examples:
>>> relation = cursor.table("PDFs")
>>> relation.order("Similarity(SentenceTransformerFeatureExtractor('When was the NATO created?'), SentenceTransformerFeatureExtractor(data) ) DESC")
"""
parsed_expr = parse_sql_orderby_expr(order_expr)
self._query_node = handle_select_clause(
self._query_node, self._alias, "orderby_list", parsed_expr
)
try_binding(self._evadb.catalog, self._query_node)
return self
def METHOD_NAME(self, expr: str) -> "EvaDBQuery":
"""
Projects a set of expressions and returns a new EvaDBQuery.
Parameters:
            expr (str): The expression(s) to be selected. If '*' is provided, it expands to all columns in the current EvaDBQuery.
Returns:
EvaDBQuery: A EvaDBQuery with subset (or all) of columns.
Examples:
>>> relation = cursor.table("sample_table")
Select all columns in the EvaDBQuery.
>>> relation.select("*")
            Select a subset of columns in the EvaDBQuery.
>>> relation.select("col1")
>>> relation.select("col1, col2")
"""
parsed_exprs = sql_string_to_expresssion_list(expr)
self._query_node = handle_select_clause(
self._query_node, self._alias, "target_list", parsed_exprs
)
try_binding(self._evadb.catalog, self._query_node)
return self
def show(self) -> pandas.DataFrame:
"""Execute and fetch all rows as a pandas DataFrame
Returns:
pandas.DataFrame:
"""
batch = self.execute()
assert batch is not None, "relation execute failed"
return batch.frames
def sql_query(self) -> str:
"""Get the SQL query that is equivalent to the relation
Returns:
str: the sql query
Examples:
>>> relation = cursor.table("sample_table").project('i')
>>> relation.sql_query()
"""
return str(self._query_node) | null |
219 | ################################################################################
# Creme is a free/open-source Customer Relationship Management software
# Copyright (C) 2020-2022 Hybird
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Iterable, Iterator, NewType
from django.conf import settings
from creme.creme_core.utils.imports import safe_import_object
if TYPE_CHECKING:
from django.http import HttpResponse
from creme.creme_core.models import CremeEntity, FileRef
logger = logging.getLogger(__name__)
AGNOSTIC = 'AGNOSTIC'
FlavourId = str
EngineId = str
ExporterId = NewType('ExporterId', str)
class ExporterFlavour:
"""Variation to use for a given engine."""
def __init__(self, country='', language='', theme=''):
"""Constructor.
@param country: country code (e.g. 'FR').
@param language: language code (e.g. 'fr_FR').
@param theme: name of the theme
"""
self.country = country
self.language = language
self.theme = theme
def __eq__(self, other):
return (
self.country == other.country
and self.language == other.language
and self.theme == other.theme
)
def __repr__(self):
return f'ExporterFlavour("{self.country}", "{self.language}", "{self.theme}")'
@classmethod
def agnostic(cls) -> ExporterFlavour:
"A variation not related to a country or a language."
return cls(country=AGNOSTIC)
def METHOD_NAME(self) -> FlavourId:
"Get a string ID."
return FlavourId(f'{self.country}/{self.language}/{self.theme}')
@classmethod
def from_id(cls, flavour_id: FlavourId) -> ExporterFlavour:
"""Get an instance from an ID."""
return cls(*flavour_id.split('/', 2))
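    # Round-trip sketch: an id is "<country>/<language>/<theme>", so
    #   ExporterFlavour.from_id("FR/fr_FR/classic")
    # equals ExporterFlavour("FR", "fr_FR", "classic"), while the agnostic
    # flavour serialises as "AGNOSTIC//".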
class BillingExporter:
"""Base class for exporters.
Exporters can take a billing entity model and produce output files
(generally PDF, by using third party libraries).
"""
ID_SEPARATOR = '|'
def __init__(self, *,
verbose_name: str,
engine: BillingExportEngine,
flavour: ExporterFlavour):
self.verbose_name = verbose_name
self.engine = engine
self.flavour = flavour
def export(self, *,
entity: CremeEntity,
user) -> FileRef | HttpResponse:
raise NotImplementedError
@property
def id(self) -> ExporterId:
return ExporterId(
f'{self.engine.id}{self.ID_SEPARATOR}{self.flavour.METHOD_NAME()}'
)
@property
def screenshots(self) -> Iterator[str]:
"""Get resources paths to screenshots.
Useful to illustrate choices in configurations GUI.
"""
raise NotImplementedError
class BillingExportEngine:
"""Base class for exporter engines.
An engine can create <BillingExporter> instances for a given variation (flavour).
"""
id: EngineId = EngineId('') # Use generate_id()
def __init__(self, model: type[CremeEntity]):
self.model = model
def exporter(self, flavour: ExporterFlavour) -> BillingExporter:
raise NotImplementedError
@property
def flavours(self) -> Iterator[ExporterFlavour]:
"""Different flavours an engine supports.
Used by the configuration GUI.
"""
raise NotImplementedError
@staticmethod
def generate_id(app_label: str, name: str) -> EngineId:
if BillingExporter.ID_SEPARATOR in name:
raise ValueError(
f'Invalid character for name: {BillingExporter.ID_SEPARATOR}'
)
return EngineId(f'{app_label}-{name}')
class BillingExportEngineManager:
"""Manage the list of all available types of engine."""
class InvalidEngineClass(Exception):
pass
def __init__(self, engine_paths: Iterable[str] | None = None):
"""Constructor.
@param engine_paths: paths to Python classes inheriting
<BillingExportEngine>. If <None>, the setting "BILLING_EXPORTERS"
is used.
"""
self.engine_paths = (
settings.BILLING_EXPORTERS
if engine_paths is None else
[*engine_paths]
)
@property
def engine_classes(self) -> Iterator[type[BillingExportEngine]]:
for path in self.engine_paths:
cls = safe_import_object(path)
if cls is None:
raise self.InvalidEngineClass(
f'"{path}" is an invalid path of <BillingExportEngine>.'
)
if not issubclass(cls, BillingExportEngine):
raise self.InvalidEngineClass(
f'{cls} is invalid, it is not a sub-class of <BillingExportEngine>.'
)
yield cls
def engine(self, *,
engine_id: EngineId,
model: type[CremeEntity]) -> BillingExportEngine | None:
for cls in self.engine_classes:
if cls.id == engine_id:
return cls(model)
return None
def exporter(self, *,
engine_id: EngineId,
flavour_id: FlavourId,
model: type[CremeEntity]) -> BillingExporter | None:
engine = self.engine(engine_id=engine_id, model=model)
return None if engine is None else engine.exporter(
flavour=ExporterFlavour.from_id(flavour_id),
) | null |
220 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkiot.endpoint import endpoint_data
class UpdateSubscribeRelationRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Iot', '2018-01-20', 'UpdateSubscribeRelation')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_OtaEventFlag(self):
return self.get_query_params().get('OtaEventFlag')
def set_OtaEventFlag(self,OtaEventFlag):
self.add_query_param('OtaEventFlag',OtaEventFlag)
def get_DeviceTopoLifeCycleFlag(self):
return self.get_query_params().get('DeviceTopoLifeCycleFlag')
def set_DeviceTopoLifeCycleFlag(self,DeviceTopoLifeCycleFlag):
self.add_query_param('DeviceTopoLifeCycleFlag',DeviceTopoLifeCycleFlag)
def get_Type(self):
return self.get_query_params().get('Type')
def set_Type(self,Type):
self.add_query_param('Type',Type)
def get_DeviceLifeCycleFlag(self):
return self.get_query_params().get('DeviceLifeCycleFlag')
def set_DeviceLifeCycleFlag(self,DeviceLifeCycleFlag):
self.add_query_param('DeviceLifeCycleFlag',DeviceLifeCycleFlag)
def get_IotInstanceId(self):
return self.get_query_params().get('IotInstanceId')
def set_IotInstanceId(self,IotInstanceId):
self.add_query_param('IotInstanceId',IotInstanceId)
def get_DeviceStatusChangeFlag(self):
return self.get_query_params().get('DeviceStatusChangeFlag')
def set_DeviceStatusChangeFlag(self,DeviceStatusChangeFlag):
self.add_query_param('DeviceStatusChangeFlag',DeviceStatusChangeFlag)
def get_OtaVersionFlag(self):
return self.get_query_params().get('OtaVersionFlag')
def set_OtaVersionFlag(self,OtaVersionFlag):
self.add_query_param('OtaVersionFlag',OtaVersionFlag)
def METHOD_NAME(self):
return self.get_query_params().get('DeviceTagFlag')
def set_DeviceTagFlag(self,DeviceTagFlag):
self.add_query_param('DeviceTagFlag',DeviceTagFlag)
def get_ConsumerGroupIdss(self):
return self.get_query_params().get('ConsumerGroupIds')
def set_ConsumerGroupIdss(self, ConsumerGroupIdss):
for depth1 in range(len(ConsumerGroupIdss)):
if ConsumerGroupIdss[depth1] is not None:
self.add_query_param('ConsumerGroupIds.' + str(depth1 + 1) , ConsumerGroupIdss[depth1])
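    # Sketch: set_ConsumerGroupIdss(['gid-a', 'gid-b']) adds the query
    # parameters ConsumerGroupIds.1 = 'gid-a' and ConsumerGroupIds.2 = 'gid-b'
    # (1-based dotted keys, consistent with the other repeated fields).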
def get_ProductKey(self):
return self.get_query_params().get('ProductKey')
def set_ProductKey(self,ProductKey):
self.add_query_param('ProductKey',ProductKey)
def get_ThingHistoryFlag(self):
return self.get_query_params().get('ThingHistoryFlag')
def set_ThingHistoryFlag(self,ThingHistoryFlag):
self.add_query_param('ThingHistoryFlag',ThingHistoryFlag)
def get_FoundDeviceListFlag(self):
return self.get_query_params().get('FoundDeviceListFlag')
def set_FoundDeviceListFlag(self,FoundDeviceListFlag):
self.add_query_param('FoundDeviceListFlag',FoundDeviceListFlag)
def get_OtaJobFlag(self):
return self.get_query_params().get('OtaJobFlag')
def set_OtaJobFlag(self,OtaJobFlag):
self.add_query_param('OtaJobFlag',OtaJobFlag)
def get_SubscribeFlags(self):
return self.get_query_params().get('SubscribeFlags')
def set_SubscribeFlags(self,SubscribeFlags):
self.add_query_param('SubscribeFlags',SubscribeFlags)
def get_DeviceDataFlag(self):
return self.get_query_params().get('DeviceDataFlag')
def set_DeviceDataFlag(self,DeviceDataFlag):
self.add_query_param('DeviceDataFlag',DeviceDataFlag)
def get_MnsConfiguration(self):
return self.get_query_params().get('MnsConfiguration')
def set_MnsConfiguration(self,MnsConfiguration):
		self.add_query_param('MnsConfiguration', MnsConfiguration) | null
221 | # Copyright 2021-2023 AIPlan4EU project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unified_planning.shortcuts import *
from unified_planning.test import TestCase, main
from unified_planning.model.walkers import Substituter
from unified_planning.environment import get_environment
from unified_planning.exceptions import UPTypeError
class TestSubstituter(TestCase):
def METHOD_NAME(self):
TestCase.METHOD_NAME(self)
def test_id_walker(self):
s = get_environment().substituter
# small test on already-done expressions to check the id-dagwalker
x = FluentExp(Fluent("x"))
y = FluentExp(Fluent("y", IntType()))
t = Bool(True)
f = Bool(False)
subs: Dict[Expression, Expression] = {y: 3}
# ((25/5)*30*2*2) - (20*5) (500) == (25*4*10) / 2 (500)
e1 = Equals(
Minus(Times([Div(25, 5), 30, 2, 2]), Times(20, 5)), Div(Times(25, 4, 10), 2)
)
r1 = s.substitute(e1, subs)
self.assertEqual(r1, e1)
# T => !x
e2 = Implies(e1, Not(x))
        r2 = s.substitute(e2, subs)
        self.assertEqual(r2, e2)
# !x || (T => x)
e3 = Or(e2, Implies(e1, x))
r3 = s.substitute(e3, subs)
self.assertEqual(r3, e3)
def test_substitution(self):
s = Substituter(get_environment())
xfluent = Fluent("x", IntType())
x = FluentExp(xfluent)
subst: Dict[Expression, Expression] = {}
subst[x] = Int(5)
e1 = Plus(x, 1)
s1 = s.substitute(e1, subst)
self.assertEqual(s1, Plus(5, 1))
# Testing that (a & b) with sbust = {a <- c, (c & b) <- d, (a & b) <- c} is c
a = Fluent("a")
b = Fluent("b")
c = FluentExp(Fluent("c"))
d = Fluent("d")
subst = {}
subst[a] = c
subst[And(c, b)] = d
subst[And(a, b)] = c
e2 = And(a, b)
s2 = s.substitute(e2, subst)
self.assertEqual(s2, c)
with self.assertRaises(UPTypeError):
subst = {}
subst[a] = c
subst[And(c, b)] = d
subst[And(a, b)] = Int(5)
e3 = And(a, b)
s3 = s.substitute(e3, subst)
subst = {}
subst[a] = c
subst[And(c, b)] = d
e4 = And(a, b, And(c, b))
s4 = s.substitute(e4, subst)
self.assertEqual(s4, And(c, b, d))
subst = {}
subst[a] = c
subst[c] = d
e5 = And(a, c, And(a, c))
s5 = s.substitute(e5, subst)
self.assertEqual(s5, And(c, d, And(c, d)))
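        # Note on the case above: substitution is a single simultaneous pass
        # over the expression DAG, so with subst = {a: c, c: d} the result is
        # And(c, d, And(c, d)) -- the freshly substituted c is not rewritten
        # again into d.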
with self.assertRaises(UPTypeError):
subst = {}
subst[a] = c
subst[And(c, b)] = Plus(1, 2)
e6 = Int(1)
s.substitute(e6, subst)
def test_shadowing(self):
# Here is tested the correct shadowing of the variables in the quantifiers.
# The substituter should never substitute an expression containing a Variable bounded
# by the substituter.
UT = UserType("UT")
v1, v2, v3 = (Variable(f"v{i}", UT) for i in range(1, 4))
f = Fluent("f", RealType(), var=UT)
exp1 = Forall(Equals(f(v1), f(v2) + f(v3)), v1, v2)
exp2 = And(exp1, Forall(LT(f(v2), Plus(f(v3), f(v1))), v2, v3))
s1, s2, s3 = (Variable(f"s{i}", UT) for i in range(1, 4))
subs: Dict[Expression, Expression] = {
v1: s1,
v2: s2,
v3: s3,
}
# only v3 is changed with s3, because it's the only one not bound to a quantifier
test_sub_exp1 = Forall(Equals(f(v1), f(v2) + f(s3)), v1, v2)
# in the right part, only v1 is changed with s1, because it's the only one not bound to a quantifier
test_sub_exp2 = And(
test_sub_exp1, Forall(LT(f(v2), Plus(f(v3), f(s1))), v2, v3)
)
self.assertEqual(exp2.substitute(subs), test_sub_exp2)
# test nested quantifiers
exp1 = Forall(And(f(v2) > f(v1), Exists(Equals(f(v1), f(v2) + f(v3)), v2)), v1)
# v2 is changed with s2 in the Forall, while in the Exists only v3 is changed with s3
test_sub_exp1 = Forall(
And(f(s2) > f(v1), Exists(Equals(f(v1), f(v2) + f(s3)), v2)), v1
)
self.assertEqual(exp1.substitute(subs), test_sub_exp1)
exp1 = Equals(f(v2), f(v1))
self.assertEqual(exp1.substitute(subs), Equals(f(s2), f(s1)))
exp2 = And(Forall(exp1, v1), exp1)
self.assertEqual(
exp2.substitute(subs),
And(Forall(Equals(f(s2), f(v1)), v1), Equals(f(s2), f(s1))),
)
exp3 = And(exp1, Forall(exp1, v1))
self.assertEqual(
exp3.substitute(subs),
And(Equals(f(s2), f(s1)), Forall(Equals(f(s2), f(v1)), v1)),
)
b = Fluent("b", param=UT)
exp1 = And(b(v1), Forall(And(b(v1), b(v2)), v2))
subs = {b(v1): True}
test_sub_exp1 = Forall(b(v2), v2)
self.assertEqual(exp1.substitute(subs).simplify(), test_sub_exp1)
subs = {b(v2): True}
test_sub_exp1 = And(b(v1), Forall(And(b(v1), b(v2)), v2))
self.assertEqual(exp1.substitute(subs), test_sub_exp1)
if __name__ == "__main__":
main() | null |
222 | """Tests for Action Classification Task with OTX CLI"""
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
import os
from copy import deepcopy
import pytest
import torch
from otx.api.entities.model_template import parse_model_template
from otx.cli.registry import Registry
from tests.test_suite.e2e_test_system import e2e_pytest_component
from tests.test_suite.run_test_command import (
otx_eval_openvino_testing,
otx_eval_testing,
otx_export_testing,
otx_train_testing,
otx_resume_testing,
get_template_dir,
)
# Finetuning arguments
# TODO: Need to change sample dataset
args = {
"--train-data-roots": "tests/assets/cvat_dataset/action_classification/train",
"--val-data-roots": "tests/assets/cvat_dataset/action_classification/train",
"--test-data-roots": "tests/assets/cvat_dataset/action_classification/train",
"train_params": ["params", "--learning_parameters.num_iters", "1", "--learning_parameters.batch_size", "4"],
}
# Training params for resume, num_iters*2
resume_params = [
"params",
"--learning_parameters.num_iters",
"2",
"--learning_parameters.batch_size",
"4",
]
otx_dir = os.getcwd()
MULTI_GPU_UNAVAILABLE = torch.cuda.device_count() <= 1
TT_STABILITY_TESTS = os.environ.get("TT_STABILITY_TESTS", False)
if TT_STABILITY_TESTS:
default_template = parse_model_template(
os.path.join("src/otx/algorithms/action/configs", "classification", "x3d", "template.yaml")
)
templates = [default_template] * 100
templates_ids = [template.model_template_id + f"-{i+1}" for i, template in enumerate(templates)]
else:
templates = Registry("src/otx/algorithms/action").filter(task_type="ACTION_CLASSIFICATION").templates
templates_ids = [template.model_template_id for template in templates]
class TestToolsOTXActionClassification:
@e2e_pytest_component
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def test_otx_train(self, template, tmp_dir_path):
tmp_dir_path = tmp_dir_path / "action_cls"
otx_train_testing(template, tmp_dir_path, otx_dir, args)
@e2e_pytest_component
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def test_otx_resume(self, template, tmp_dir_path):
tmp_dir_path = tmp_dir_path / "action_cls/test_resume"
otx_resume_testing(template, tmp_dir_path, otx_dir, args)
template_work_dir = get_template_dir(template, tmp_dir_path)
        args1 = deepcopy(args)
args1["train_params"] = resume_params
args1[
"--resume-from"
] = f"{template_work_dir}/trained_for_resume_{template.model_template_id}/models/weights.pth"
otx_resume_testing(template, tmp_dir_path, otx_dir, args1)
@e2e_pytest_component
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def test_otx_eval(self, template, tmp_dir_path):
tmp_dir_path = tmp_dir_path / "action_cls"
otx_eval_testing(template, tmp_dir_path, otx_dir, args)
@e2e_pytest_component
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def test_otx_export(self, template, tmp_dir_path):
tmp_dir_path = tmp_dir_path / "action_cls"
otx_export_testing(template, tmp_dir_path)
@e2e_pytest_component
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def test_otx_export_fp16(self, template, tmp_dir_path):
tmp_dir_path = tmp_dir_path / "action_cls"
otx_export_testing(template, tmp_dir_path, half_precision=True)
@e2e_pytest_component
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def test_otx_export_onnx(self, template, tmp_dir_path):
tmp_dir_path = tmp_dir_path / "action_cls"
otx_export_testing(template, tmp_dir_path, half_precision=False, is_onnx=True)
@e2e_pytest_component
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def test_otx_eval_openvino(self, template, tmp_dir_path):
if template.model_template_id == "Custom_Action_Classification_MoViNet":
pytest.xfail("Issue#2058: MoViNet inference fails in OV 2023.0")
tmp_dir_path = tmp_dir_path / "action_cls"
otx_eval_openvino_testing(template, tmp_dir_path, otx_dir, args, threshold=1.0)
@e2e_pytest_component
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
@pytest.mark.parametrize("bs_adapt_type", ["Safe", "Full"])
def test_otx_train_auto_adapt_batch_size(self, template, tmp_dir_path, bs_adapt_type):
adapting_bs_args = deepcopy(args)
adapting_bs_args["train_params"].extend(["--learning_parameters.auto_adapt_batch_size", bs_adapt_type])
tmp_dir_path = tmp_dir_path / f"action_cls_auto_adapt_{bs_adapt_type}_batch_size"
otx_train_testing(template, tmp_dir_path, otx_dir, adapting_bs_args)
@e2e_pytest_component
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def METHOD_NAME(self, template, tmp_dir_path):
adapting_num_workers_args = deepcopy(args)
adapting_num_workers_args["train_params"].extend(["--learning_parameters.auto_num_workers", "True"])
tmp_dir_path = tmp_dir_path / f"action_cls_auto_adapt_num_workers"
otx_train_testing(template, tmp_dir_path, otx_dir, adapting_num_workers_args)
@e2e_pytest_component
@pytest.mark.skipif(MULTI_GPU_UNAVAILABLE, reason="The number of gpu is insufficient")
@pytest.mark.skipif(TT_STABILITY_TESTS, reason="This is TT_STABILITY_TESTS")
@pytest.mark.parametrize("template", templates, ids=templates_ids)
def test_otx_multi_gpu_train(self, template, tmp_dir_path):
tmp_dir_path = tmp_dir_path / "action_cls/test_multi_gpu"
args1 = deepcopy(args)
args1["--gpus"] = "0,1"
otx_train_testing(template, tmp_dir_path, otx_dir, args1) | null |
223 | # Copyright (c) 2022 The Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from typing import Optional, List
from ...utils.requires import requires
from .abstract_core import AbstractCore
from ...isas import ISA
from ...runtime import get_runtime_isa
from ...utils.override import overrides
from m5.objects import (
BaseMMU,
Port,
BaseCPU,
Process,
PcCountTracker,
PcCountTrackerManager,
)
from m5.params import PcCountPair
class BaseCPUCore(AbstractCore):
"""
    A stdlib AbstractCore subclass that wraps a BaseCPU SimObject type.
"""
def __init__(self, core: BaseCPU, isa: Optional[ISA] = None):
super().__init__()
# There is some annoying redundancy here. The BaseCPU type already
# defines the ISA, so here we are defining it twice. However, there
# currently isn't a good way to get the ISA from the BaseCPU Type.
if isa:
requires(isa_required=isa)
self._isa = isa
else:
self._isa = get_runtime_isa()
self.core = core
self.core.createThreads()
def get_simobject(self) -> BaseCPU:
return self.core
@overrides(AbstractCore)
def requires_send_evicts(self) -> bool:
if self.get_isa() in (ISA.ARM, ISA.X86):
            # * The x86 `mwait` instruction is built on top of coherence,
# therefore evictions must be sent from cache to the CPU Core.
#
# * The local exclusive monitor in ARM systems requires the sending
# of evictions from cache to the CPU Core.
return True
# The O3 model must keep the LSQ coherent with the caches.
# The code below will check to see if the current base CPU is of the O3
# type for the current ISA target (a bit ugly but it works).
try:
from m5.objects import BaseO3CPU
return isinstance(self.get_simobject(), BaseO3CPU)
except ImportError:
# If, for whatever reason, the BaseO3CPU is not importable, then
            # the current core cannot be an O3 CPU. We therefore return
# False.
return False
@overrides(AbstractCore)
def is_kvm_core(self) -> bool:
try:
from m5.objects import BaseKvmCPU
return isinstance(self.core, BaseKvmCPU)
except ImportError:
# If importing BaseKvmCPU throws an exception then it's because
# it's not compiled into the binary. If this is the case then this
# can't be a KVM core.
return False
def get_isa(self) -> ISA:
return self._isa
@overrides(AbstractCore)
def connect_icache(self, port: Port) -> None:
self.core.icache_port = port
@overrides(AbstractCore)
def connect_dcache(self, port: Port) -> None:
self.core.dcache_port = port
@overrides(AbstractCore)
def connect_walker_ports(self, port1: Port, port2: Port) -> None:
if self.get_isa() == ISA.ARM:
# Unlike X86 and RISCV MMU, the ARM MMU has two L1 TLB walker ports
# named `walker` and `stage2_walker` for both data and instruction.
# The gem5 standard library currently supports one TLB walker port
# per cache level. Therefore, we are explicitly setting the walker
# ports and not setting the stage2_walker ports for ARM systems.
self.core.mmu.itb_walker.port = port1
self.core.mmu.dtb_walker.port = port2
else:
self.core.mmu.connectWalkerPorts(port1, port2)
@overrides(AbstractCore)
def set_workload(self, process: Process) -> None:
self.core.workload = process
@overrides(AbstractCore)
def METHOD_NAME(self, value: bool) -> None:
self.core.switched_out = value
@overrides(AbstractCore)
    def connect_interrupt(
        self,
        interrupt_requestor: Optional[Port] = None,
        interrupt_response: Optional[Port] = None,
    ) -> None:
        # TODO: This model assumes that we will only create an interrupt
        # controller as we require it. Not sure how true this is in all cases.
        self.core.createInterruptController()
        if self.get_isa().value == ISA.X86.value:
            if interrupt_requestor is not None:
                self.core.interrupts[0].pio = interrupt_requestor
                self.core.interrupts[0].int_responder = interrupt_requestor
            if interrupt_response is not None:
                self.core.interrupts[0].int_requestor = interrupt_response
@overrides(AbstractCore)
def get_mmu(self) -> BaseMMU:
return self.core.mmu
@overrides(AbstractCore)
def _set_simpoint(
self, inst_starts: List[int], board_initialized: bool
) -> None:
if board_initialized:
self.core.scheduleSimpointsInstStop(sorted(set(inst_starts)))
else:
self.core.simpoint_start_insts = sorted(set(inst_starts))
@overrides(AbstractCore)
def _set_inst_stop_any_thread(
self, inst: int, board_initialized: bool
) -> None:
if board_initialized:
self.core.scheduleInstStopAnyThread(inst)
else:
self.core.max_insts_any_thread = inst
@overrides(AbstractCore)
def add_pc_tracker_probe(
self, target_pair: List[PcCountPair], manager: PcCountTrackerManager
) -> None:
pair_tracker = PcCountTracker()
pair_tracker.targets = target_pair
pair_tracker.core = self.core
pair_tracker.ptmanager = manager
self.core.probeListener = pair_tracker | null |
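# A minimal usage sketch (not from the original source; it assumes a
# TimingSimpleCPU is compiled into the gem5 binary and that the build
# targets X86):
#
#     from m5.objects import TimingSimpleCPU
#     core = BaseCPUCore(core=TimingSimpleCPU(cpu_id=0), isa=ISA.X86)
#     assert not core.is_kvm_core()
#     assert core.requires_send_evicts()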
224 | #!/usr/bin/env python3
# Copyright (c) 2013 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This file is a library of commonly used functions used when interfacing
# with protobuf python messages. For eg, the decode scripts for different
# types of proto objects can use the same function to decode a single message
import gzip
import struct
def openFileRd(in_file):
"""
This opens the file passed as argument for reading using an appropriate
function depending on if it is gzipped or not. It returns the file
handle.
"""
try:
# First see if this file is gzipped
try:
# Opening the file works even if it is not a gzip file
proto_in = gzip.open(in_file, "rb")
# Force a check of the magic number by seeking in the
# file. If we do not do it here the error will occur when
# reading the first message.
proto_in.seek(1)
proto_in.seek(0)
except IOError:
proto_in = open(in_file, "rb")
except IOError:
print("Failed to open ", in_file, " for reading")
exit(-1)
return proto_in
def _DecodeVarint32(in_file):
"""
The decoding of the Varint32 is copied from
google.protobuf.internal.decoder and is only repeated here to
avoid depending on the internal functions in the library. If the
end of file is reached, return (0, 0).
"""
result = 0
shift = 0
pos = 0
# Use a 32-bit mask
mask = 0xFFFFFFFF
while 1:
c = in_file.read(1)
if len(c) == 0:
return (0, 0)
b = struct.unpack("<B", c)[0]
result |= (b & 0x7F) << shift
pos += 1
if not (b & 0x80):
if result > 0x7FFFFFFFFFFFFFFF:
result -= 1 << 64
result |= ~mask
else:
result &= mask
return (result, pos)
shift += 7
if shift >= 64:
raise IOError("Too many bytes when decoding varint.")
def decodeMessage(in_file, message):
"""
Attempt to read a message from the file and decode it. Return
False if no message could be read.
"""
try:
size, pos = _DecodeVarint32(in_file)
if size == 0:
return False
buf = in_file.read(size)
message.ParseFromString(buf)
return True
except IOError:
return False
def METHOD_NAME(out_file, value):
"""
The encoding of the Varint32 is copied from
google.protobuf.internal.encoder and is only repeated here to
avoid depending on the internal functions in the library.
"""
bits = value & 0x7F
value >>= 7
while value:
out_file.write(struct.pack("<B", 0x80 | bits))
bits = value & 0x7F
value >>= 7
out_file.write(struct.pack("<B", bits))
def encodeMessage(out_file, message):
"""
    Encode a message with its length prepended as a 32-bit varint.
"""
out = message.SerializeToString()
METHOD_NAME(out_file, len(out))
out_file.write(out) | null |
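# A minimal round-trip sketch (not from the original source; uses an
# in-memory buffer in place of a real trace file):
def varintRoundTripExample(value=300):
    import io
    buf = io.BytesIO()
    # Encode the value as a length-prefix varint, then decode it back.
    METHOD_NAME(buf, value)
    buf.seek(0)
    decoded, _ = _DecodeVarint32(buf)
    assert decoded == value
    return decoded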
225 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeLaunchTemplateVersionsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeLaunchTemplateVersions','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_LaunchTemplateName(self): # String
return self.get_query_params().get('LaunchTemplateName')
def METHOD_NAME(self, LaunchTemplateName): # String
self.add_query_param('LaunchTemplateName', LaunchTemplateName)
def get_MaxVersion(self): # Long
return self.get_query_params().get('MaxVersion')
def set_MaxVersion(self, MaxVersion): # Long
self.add_query_param('MaxVersion', MaxVersion)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DefaultVersion(self): # Boolean
return self.get_query_params().get('DefaultVersion')
def set_DefaultVersion(self, DefaultVersion): # Boolean
self.add_query_param('DefaultVersion', DefaultVersion)
def get_MinVersion(self): # Long
return self.get_query_params().get('MinVersion')
def set_MinVersion(self, MinVersion): # Long
self.add_query_param('MinVersion', MinVersion)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_LaunchTemplateId(self): # String
return self.get_query_params().get('LaunchTemplateId')
def set_LaunchTemplateId(self, LaunchTemplateId): # String
self.add_query_param('LaunchTemplateId', LaunchTemplateId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LaunchTemplateVersions(self): # RepeatList
return self.get_query_params().get('LaunchTemplateVersion')
def set_LaunchTemplateVersions(self, LaunchTemplateVersion): # RepeatList
for depth1 in range(len(LaunchTemplateVersion)):
self.add_query_param('LaunchTemplateVersion.' + str(depth1 + 1), LaunchTemplateVersion[depth1])
def get_DetailFlag(self): # Boolean
return self.get_query_params().get('DetailFlag')
def set_DetailFlag(self, DetailFlag): # Boolean
self.add_query_param('DetailFlag', DetailFlag) | null |
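# A minimal usage sketch (not from the original source; the credentials,
# region, and template id are placeholders):
#
#     from aliyunsdkcore.client import AcsClient
#     client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#     request = DescribeLaunchTemplateVersionsRequest()
#     request.set_LaunchTemplateId('lt-xxxxxxxx')
#     request.set_PageSize(10)
#     response = client.do_action_with_exception(request)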
226 | import json
import logging
from types import GeneratorType
import pytest
from pypuppetdb.errors import EmptyResponseError
from requests import Response
from requests.exceptions import ConnectionError, HTTPError
from werkzeug.exceptions import InternalServerError, NotFound
from puppetboard import app
from puppetboard import utils
def test_json_format():
demo = [{'foo': 'bar'}, {'bar': 'foo'}]
sample = json.dumps(demo, indent=2, separators=(',', ': '))
assert sample == utils.jsonprint(demo), "Json formatting has changed"
def test_parse_python_array():
python_array = [{'foo': 'bar'}, {'bar': 'foo'}]
python_array_as_string = str(python_array)
assert python_array == utils.parse_python(python_array_as_string)
def test_parse_python_not_really_array():
    # the downside of simplifying the display of plain strings without quotes is
    # that it's hard to distinguish values that LOOK LIKE non-strings but are in
    # fact strings.
python_not_really_array = '"["foo", "bar"]"'
python_not_really_array_as_string = '"["foo", "bar"]"'
assert python_not_really_array == utils.parse_python(python_not_really_array_as_string)
def test_parse_python_dict():
python_dict = {'foo': 'bar'}
python_dict_as_string = str(python_dict)
assert python_dict == utils.parse_python(python_dict_as_string)
def test_parse_python_string():
a_string = "foobar"
assert a_string == utils.parse_python(a_string)
def test_format_val_str():
x = "some string"
assert x == utils.formatvalue(x), "Should return same value"
def METHOD_NAME():
x = ['a', 'b', 'c']
assert "a, b, c" == utils.formatvalue(x)
def test_format_val_dict_one_layer():
x = {'a': 'b'}
assert "a => b,<br/>" == utils.formatvalue(x)
def test_format_val_tuple():
x = ('a', 'b')
assert str(x) == utils.formatvalue(x)
def test_get():
x = "hello world"
def test_get_or_abort():
return x
assert x == utils.get_or_abort(test_get_or_abort)
@pytest.fixture
def mock_log(mocker):
return mocker.patch('logging.log')
@pytest.fixture
def mock_info_log(mocker):
logger = logging.getLogger('puppetboard.utils')
return mocker.patch.object(logger, 'info')
@pytest.fixture
def mock_err_log(mocker):
logger = logging.getLogger('puppetboard.utils')
return mocker.patch.object(logger, 'error')
def test_http_error(mock_log):
err = "NotFound"
def raise_http_error():
x = Response()
x.status_code = 404
x.reason = err
raise HTTPError(err, response=x)
with pytest.raises(NotFound):
utils.get_or_abort(raise_http_error)
mock_log.error.assert_called_once_with(err)
def test_http_error_reraised_as_client_side(mock_log):
err = "The request is invalid because ..."
def raise_http_400_error():
x = Response()
x.status_code = 400
x.reason = err
raise HTTPError(err, response=x)
with pytest.raises(HTTPError):
utils.get_or_abort_except_client_errors(raise_http_400_error)
mock_log.warning.assert_called_once_with(err)
def test_http_connection_error(mock_log):
err = "ConnectionError"
def connection_error():
x = Response()
x.status_code = 500
x.reason = err
raise ConnectionError(err, response=x)
with pytest.raises(InternalServerError):
utils.get_or_abort(connection_error)
mock_log.error.assert_called_with(err)
def test_basic_exception(mock_log):
    err = "Exception"
    def exception_error():
        x = Response()
        x.reason = err
        raise Exception(err)
    # Pass the callable itself so that get_or_abort() handles the exception
    # and aborts with an HTTP 500.
    with pytest.raises(InternalServerError) as exception:
        utils.get_or_abort(exception_error)
    mock_log.error.assert_called_with(err)
    assert exception.value.code == 500
@pytest.mark.parametrize(
"version,is_ok",
[
("4.2.0", False),
("5.1.0", False),
("5.2.0", True),
("5.2.19", True),
("6.4.0", True),
("6.9.1", True),
("7.0.0", True),
("7.11.1-20220809_222149-g0b0b67c", True),
("8.0.0", True),
],
)
def test_db_version(mocker, version, is_ok):
mocker.patch.object(app.puppetdb, "current_version", return_value=version)
if is_ok:
utils.check_db_version(app.puppetdb)
else:
        with pytest.raises(SystemExit) as e:
            utils.check_db_version(app.puppetdb)
        assert e.value.code == 1
def test_db_invalid_version(mocker, mock_err_log):
mocker.patch.object(app.puppetdb, 'current_version', return_value='4')
    with pytest.raises(SystemExit) as e:
        utils.check_db_version(app.puppetdb)
    assert e.value.code == 2
def test_db_http_error(mocker, mock_err_log):
err = "NotFound"
def raise_http_error():
x = Response()
x.status_code = 404
x.reason = err
raise HTTPError(err, response=x)
mocker.patch.object(app.puppetdb, 'current_version',
side_effect=raise_http_error)
    with pytest.raises(SystemExit) as e:
        utils.check_db_version(app.puppetdb)
    assert e.value.code == 2
def test_db_connection_error(mocker, mock_err_log):
err = "ConnectionError"
def connection_error():
x = Response()
x.status_code = 500
x.reason = err
raise ConnectionError(err, response=x)
mocker.patch.object(app.puppetdb, 'current_version',
side_effect=connection_error)
    with pytest.raises(SystemExit) as e:
        utils.check_db_version(app.puppetdb)
    assert e.value.code == 2
def test_db_empty_response(mocker, mock_err_log):
err = "Empty Response"
def connection_error():
raise EmptyResponseError(err)
mocker.patch.object(app.puppetdb, 'current_version',
side_effect=connection_error)
    with pytest.raises(SystemExit) as e:
        utils.check_db_version(app.puppetdb)
    assert e.value.code == 2
def test_iter():
test_list = (0, 1, 2, 3)
def my_generator():
for element in test_list:
yield element
gen = utils.yield_or_stop(my_generator())
assert isinstance(gen, GeneratorType)
i = 0
for val in gen:
assert i == val
i = i + 1
def test_stop_empty():
def my_generator():
yield 1
raise EmptyResponseError
gen = utils.yield_or_stop(my_generator())
for val in gen:
assert 1 == val
def test_stop_conn_error():
def my_generator():
yield 1
raise ConnectionError
gen = utils.yield_or_stop(my_generator())
for val in gen:
assert 1 == val
def test_stop_http_error():
def my_generator():
yield 1
raise HTTPError
gen = utils.yield_or_stop(my_generator())
for val in gen:
assert 1 == val
def test_quote_columns_data():
quoted_with_dot = utils.quote_columns_data('foo.bar')
assert quoted_with_dot == 'foo\\.bar' | null |
227 | from django.urls import NoReverseMatch
from rest_framework import exceptions as drf_exceptions
from rest_framework import versioning as drf_versioning
from rest_framework.compat import unicode_http_header
from rest_framework.utils.mediatypes import _MediaType
from distutils.version import StrictVersion
from api.base import exceptions
from api.base import utils
from api.base.renderers import BrowsableAPIRendererNoForms
from api.base.settings import LATEST_VERSIONS
# Determines the API version that starts relying on the new DraftRegistration serializers
DRAFT_REGISTRATION_SERIALIZERS_UPDATE_VERSION = '2.20'
# Determines the API version that starts using the new fields to create a registration
CREATE_REGISTRATION_FIELD_CHANGE_VERSION = '2.19'
# KEBAB_CASE_VERSION determines the API version in which kebab-case will begin being accepted.
# Note that this version will not deprecate snake_case yet.
KEBAB_CASE_VERSION = '2.18'
def get_major_version(version):
return int(version.split('.')[0])
def url_path_version_to_decimal(url_path_version):
# 'v2' --> '2.0'
return str(float(url_path_version.split('v')[1]))
def decimal_version_to_url_path(decimal_version):
# '2.0' --> 'v2'
return 'v{}'.format(get_major_version(decimal_version))
def get_latest_sub_version(major_version):
# '2' --> '2.6'
return LATEST_VERSIONS.get(major_version, None)
def get_kebab_snake_case_field(version, field):
if StrictVersion(version) < StrictVersion(KEBAB_CASE_VERSION):
return field.replace('-', '_')
else:
return field
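# For example (a sketch; the field name is hypothetical): under version
# '2.17' a kebab-case field is converted back to snake_case, while from
# KEBAB_CASE_VERSION ('2.18') onward it is passed through unchanged:
#
#     assert get_kebab_snake_case_field('2.17', 'last-fetched') == 'last_fetched'
#     assert get_kebab_snake_case_field('2.18', 'last-fetched') == 'last-fetched'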
class BaseVersioning(drf_versioning.BaseVersioning):
def __init__(self):
super(BaseVersioning, self).__init__()
def get_url_path_version(self, kwargs):
invalid_version_message = 'Invalid version in URL path.'
version = kwargs.get(self.version_param)
if version is None:
return self.default_version
version = url_path_version_to_decimal(version)
if not self.is_allowed_version(version):
raise drf_exceptions.NotFound(invalid_version_message)
if get_major_version(version) == get_major_version(self.default_version):
return self.default_version
return version
def get_header_version(self, request, major_version):
invalid_version_message = 'Invalid version in "Accept" header.'
media_type = _MediaType(request.accepted_media_type)
version = media_type.params.get(self.version_param)
if not version:
return None
version = unicode_http_header(version)
if version == 'latest':
return get_latest_sub_version(major_version)
if not self.is_allowed_version(version):
raise drf_exceptions.NotAcceptable(invalid_version_message)
return version
def get_default_version(self, request, major_version):
"""Returns the latest available version for the browsable api, otherwise REST_FRAMEWORK default version"""
if request.accepted_renderer.__class__ == BrowsableAPIRendererNoForms:
return get_latest_sub_version(major_version)
return self.default_version
def get_query_param_version(self, request, major_version):
invalid_version_message = 'Invalid version in query parameter.'
version = request.query_params.get(self.version_param)
if not version:
return None
if version == 'latest':
return get_latest_sub_version(major_version)
if not self.is_allowed_version(version):
raise drf_exceptions.NotFound(invalid_version_message)
return version
def validate_pinned_versions(self, url_path_version, header_version, query_parameter_version):
url_path_major_version = get_major_version(url_path_version)
header_major_version = get_major_version(header_version) if header_version else None
query_major_version = get_major_version(query_parameter_version) if query_parameter_version else None
if header_version and header_major_version != url_path_major_version:
raise exceptions.Conflict(
detail='Version {} specified in "Accept" header does not fall within URL path version {}'.format(
header_version,
url_path_version,
),
)
if query_parameter_version and query_major_version != url_path_major_version:
raise exceptions.Conflict(
detail='Version {} specified in query parameter does not fall within URL path version {}'.format(
query_parameter_version,
url_path_version,
),
)
if header_version and query_parameter_version and (header_version != query_parameter_version):
raise exceptions.Conflict(
detail='Version {} specified in "Accept" header does not match version {} specified in query parameter'.format(
header_version,
query_parameter_version,
),
)
def METHOD_NAME(self, request, *args, **kwargs):
url_path_version = self.get_url_path_version(kwargs)
major_version = get_major_version(url_path_version)
header_version = self.get_header_version(request, major_version)
query_parameter_version = self.get_query_param_version(request, major_version)
version = url_path_version
if header_version or query_parameter_version:
self.validate_pinned_versions(url_path_version, header_version, query_parameter_version)
version = header_version if header_version else query_parameter_version
else:
version = self.get_default_version(request, major_version)
return version
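    # For example (a sketch, not from the original source): a request to
    # /v2/nodes/ with the header 'Accept: application/vnd.api+json;version=2.20'
    # resolves to version '2.20', while the same URL with no pinned header or
    # query parameter falls back to the default version for the v2 namespace.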
def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra):
url_path_version = self.get_url_path_version(kwargs)
major_version = get_major_version(url_path_version)
query_parameter_version = self.get_query_param_version(request, major_version)
kwargs = {} if (kwargs is None) else kwargs
kwargs[self.version_param] = decimal_version_to_url_path(url_path_version)
query_kwargs = {'version': query_parameter_version} if query_parameter_version else None
return utils.absolute_reverse(
viewname, query_kwargs=query_kwargs, args=args, kwargs=kwargs,
)
class PrivateVersioning(BaseVersioning):
def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra):
""" Overrides BaseVersioning.reverse to maybe ignore 'version' arg
Requests to private views in the '_' namespace don't have any version associated
with them. Related fields in their serializers that point to views in a versioned
namespace require a version kwarg to be reversed correctly. This first tries the
reverse with one, then without if that fails.
"""
try:
return super(PrivateVersioning, self).reverse(viewname, args=args, kwargs=kwargs, request=request, format=format, **extra)
except NoReverseMatch:
kwargs = kwargs or {}
if kwargs.get('version', False):
kwargs.pop('version')
return utils.absolute_reverse(
viewname, query_kwargs=None, args=args, kwargs=kwargs,
)
kwargs['version'] = get_latest_sub_version('2')
return super(PrivateVersioning, self).reverse(viewname, args=args, kwargs=kwargs, request=request, format=format, **extra) | null |
228 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkga.endpoint import endpoint_data
class UpdateListenerRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ga', '2019-11-20', 'UpdateListener','gaplus')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_BackendPortss(self): # RepeatList
return self.get_query_params().get('BackendPorts')
def set_BackendPortss(self, BackendPorts): # RepeatList
for depth1 in range(len(BackendPorts)):
if BackendPorts[depth1].get('FromPort') is not None:
self.add_query_param('BackendPorts.' + str(depth1 + 1) + '.FromPort', BackendPorts[depth1].get('FromPort'))
if BackendPorts[depth1].get('ToPort') is not None:
self.add_query_param('BackendPorts.' + str(depth1 + 1) + '.ToPort', BackendPorts[depth1].get('ToPort'))
def get_ListenerId(self): # String
return self.get_query_params().get('ListenerId')
def set_ListenerId(self, ListenerId): # String
self.add_query_param('ListenerId', ListenerId)
def get_Protocol(self): # String
return self.get_query_params().get('Protocol')
def METHOD_NAME(self, Protocol): # String
self.add_query_param('Protocol', Protocol)
def get_XForwardedForConfig(self): # Struct
return self.get_query_params().get('XForwardedForConfig')
def set_XForwardedForConfig(self, XForwardedForConfig): # Struct
if XForwardedForConfig.get('XForwardedForGaIdEnabled') is not None:
self.add_query_param('XForwardedForConfig.XForwardedForGaIdEnabled', XForwardedForConfig.get('XForwardedForGaIdEnabled'))
if XForwardedForConfig.get('XForwardedForProtoEnabled') is not None:
self.add_query_param('XForwardedForConfig.XForwardedForProtoEnabled', XForwardedForConfig.get('XForwardedForProtoEnabled'))
if XForwardedForConfig.get('XForwardedForPortEnabled') is not None:
self.add_query_param('XForwardedForConfig.XForwardedForPortEnabled', XForwardedForConfig.get('XForwardedForPortEnabled'))
if XForwardedForConfig.get('XRealIpEnabled') is not None:
self.add_query_param('XForwardedForConfig.XRealIpEnabled', XForwardedForConfig.get('XRealIpEnabled'))
if XForwardedForConfig.get('XForwardedForGaApEnabled') is not None:
self.add_query_param('XForwardedForConfig.XForwardedForGaApEnabled', XForwardedForConfig.get('XForwardedForGaApEnabled'))
def get_SecurityPolicyId(self): # String
return self.get_query_params().get('SecurityPolicyId')
def set_SecurityPolicyId(self, SecurityPolicyId): # String
self.add_query_param('SecurityPolicyId', SecurityPolicyId)
def get_ProxyProtocol(self): # String
return self.get_query_params().get('ProxyProtocol')
def set_ProxyProtocol(self, ProxyProtocol): # String
self.add_query_param('ProxyProtocol', ProxyProtocol)
def get_PortRangess(self): # RepeatList
return self.get_query_params().get('PortRanges')
def set_PortRangess(self, PortRanges): # RepeatList
for depth1 in range(len(PortRanges)):
if PortRanges[depth1].get('FromPort') is not None:
self.add_query_param('PortRanges.' + str(depth1 + 1) + '.FromPort', PortRanges[depth1].get('FromPort'))
if PortRanges[depth1].get('ToPort') is not None:
self.add_query_param('PortRanges.' + str(depth1 + 1) + '.ToPort', PortRanges[depth1].get('ToPort'))
def get_Certificatess(self): # RepeatList
return self.get_query_params().get('Certificates')
def set_Certificatess(self, Certificates): # RepeatList
for depth1 in range(len(Certificates)):
if Certificates[depth1].get('Id') is not None:
self.add_query_param('Certificates.' + str(depth1 + 1) + '.Id', Certificates[depth1].get('Id'))
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_ClientAffinity(self): # String
return self.get_query_params().get('ClientAffinity')
def set_ClientAffinity(self, ClientAffinity): # String
self.add_query_param('ClientAffinity', ClientAffinity) | null |
229 | # Copyright 2017-2020 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base_api import API
API_GET_FULL_REGISTRIES_HIERARCHY = 'dockerRegistry/loadTree'
API_REGISTRY_CERTIFICATE = 'dockerRegistry/{registry_id}/cert'
API_TOOL_GROUP = 'toolGroup'
API_TOOL_DELETE = 'tool/delete'
API_TOOL_SEARCH = 'tool/load'
API_TOOL_UPDATE = 'tool/update'
API_TOOL_REGISTER = 'tool/register'
API_TOOL_SYMLINK = 'tool/symlink'
API_TOOL_SETTINGS = 'tool/{tool_id}/settings'
API_TOOL_ICON = 'tool/{tool_id}/icon'
API_ALLOWED_INSTANCE_INFO = 'cluster/instance/allowed'
class ReadOnlyToolSyncAPI(API):
def __init__(self, api_path, access_key):
super(ReadOnlyToolSyncAPI, self).__init__(api_path, access_key)
def load_registries_hierarchy(self):
response = self.call(API_GET_FULL_REGISTRIES_HIERARCHY, http_method='GET')
return self.parse_response(response=response, default_value=[])
def search_tool_by_name(self, image_name):
tool = None
try:
response = self.call(API_TOOL_SEARCH, params={"image": image_name}, http_method='GET')
tool = self.parse_response(response=response)
        except RuntimeError:
            print('Tool [{image_name}] is not found.'.format(image_name=image_name))
return tool
def load_tool_settings(self, tool_id, tool_version=None):
response = self.call(API_TOOL_SETTINGS.format(tool_id=tool_id), params={'version': tool_version},
http_method='GET')
return self.parse_response(response=response, default_value=[])
def load_icon(self, tool_id):
return self.call(API_TOOL_ICON.format(tool_id=tool_id), http_method='GET')
def METHOD_NAME(self):
response = self.call(API_ALLOWED_INSTANCE_INFO, http_method='GET')
return self.parse_response(response=response, default_value={})
def load_registry_certificate(self, registry_id):
return self.call(API_REGISTRY_CERTIFICATE.format(registry_id=registry_id), http_method='GET')
class ToolSyncAPI(ReadOnlyToolSyncAPI):
def __init__(self, api_path, access_key):
super(ToolSyncAPI, self).__init__(api_path, access_key)
def create_tool_group(self, tool_group):
response = self.call(API_TOOL_GROUP, data=API.to_json(tool_group), http_method='POST')
return self.parse_response(response=response)
def put_tool_settings(self, tool_id, version, settings):
return self.call(API_TOOL_SETTINGS.format(tool_id=tool_id), params={'version': version},
data=API.to_json(settings),
http_method='POST')
def delete_tool(self, tool_image, version):
return self.call(API_TOOL_DELETE, params={'image': tool_image, 'hard': False}, http_method='DELETE')
def create_symlink(self, tool_id, group_id):
return self.call(API_TOOL_SYMLINK, data=API.to_json({'toolId': tool_id, 'groupId': group_id}),
http_method='POST')
def upload_icon(self, tool_id):
with open('icon_{}.png'.format(tool_id), 'rb') as icon_file:
return self.call(API_TOOL_ICON.format(tool_id=tool_id), http_method='POST', files={'icon': icon_file})
def update_tool(self, tool):
response = self.call(API_TOOL_UPDATE, data=API.to_json(tool), http_method='POST')
return self.parse_response(response=response)
def create_tool(self, tool):
response = self.call(API_TOOL_REGISTER, data=API.to_json(tool), http_method='POST')
return self.parse_response(response=response) | null |
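# A minimal usage sketch (not from the original source; the endpoint URL,
# token, image name, and the 'id' field of the returned payload are
# placeholders/assumptions):
#
#     api = ReadOnlyToolSyncAPI('https://cloud-pipeline.example/restapi/', '<API_TOKEN>')
#     registries = api.load_registries_hierarchy()
#     tool = api.search_tool_by_name('library/ubuntu:18.04')
#     if tool:
#         settings = api.load_tool_settings(tool['id'])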
230 | import calendar
from flask import make_response, Response
from flask_appbuilder import expose, has_access, permission_name
from flask_appbuilder import ModelView
from flask_appbuilder.charts.views import GroupByChartView
from flask_appbuilder.models.group import aggregate_count
from flask_appbuilder.models.mongoengine.interface import MongoEngineInterface
from . import appbuilder
from .models import ContactGroup, Contact, Tags, Gender
def fill_gender():
try:
g1 = Gender(name="Male")
g1.save()
g2 = Gender(name="Female")
g2.save()
    except Exception:
        # The gender rows already exist; nothing to do.
        pass
class ContactModelView(ModelView):
datamodel = MongoEngineInterface(Contact)
label_columns = {"image_thumb_show": "Photo", "image_show": "Photo"}
list_columns = [
"image_thumb_show",
"name",
"personal_celphone",
"birthday",
"contact_group",
]
show_columns = [
"image_show",
"name",
"personal_celphone",
"birthday",
"contact_group",
]
@expose("/mongo_download/<pk>")
@has_access
def METHOD_NAME(self, pk):
item = self.datamodel.get(pk)
file = item.file.read()
response = make_response(file)
response.headers["Content-Disposition"] = "attachment; filename={0}".format(
item.file.name
)
return response
@expose("/img/<pk>")
@has_access
@permission_name("show_img")
def img(self, pk):
item = self.datamodel.get(pk)
mime_type = item.image.content_type
return Response(item.image.read(), mimetype=mime_type, direct_passthrough=True)
@expose("/img_thumb/<pk>")
@has_access
@permission_name("show_img")
def img_thumb(self, pk):
item = self.datamodel.get(pk)
mime_type = item.image.content_type
return Response(
item.image.thumbnail.read(), mimetype=mime_type, direct_passthrough=True
)
class GroupModelView(ModelView):
datamodel = MongoEngineInterface(ContactGroup)
related_views = [ContactModelView]
search_columns = ["name"]
class TagsModelView(ModelView):
datamodel = MongoEngineInterface(Tags)
class ContactChartView(GroupByChartView):
datamodel = MongoEngineInterface(Contact)
chart_title = "Grouped contacts"
label_columns = ContactModelView.label_columns
chart_type = "PieChart"
definitions = [
{"group": "contact_group", "series": [(aggregate_count, "contact_group")]},
{"group": "gender", "series": [(aggregate_count, "gender")]},
]
def pretty_month_year(value):
return calendar.month_name[value.month] + " " + str(value.year)
def pretty_year(value):
return str(value.year)
class ContactTimeChartView(GroupByChartView):
datamodel = MongoEngineInterface(Contact)
chart_title = "Grouped Birth contacts"
chart_type = "AreaChart"
label_columns = ContactModelView.label_columns
definitions = [
{
"group": "month_year",
"formatter": pretty_month_year,
"series": [(aggregate_count, "contact_group")],
},
{
"group": "year",
"formatter": pretty_year,
"series": [(aggregate_count, "contact_group")],
},
]
appbuilder.add_view(
GroupModelView,
"List Groups",
icon="fa-folder-open-o",
category="Contacts",
category_icon="fa-envelope",
)
appbuilder.add_view(
ContactModelView,
"List Contacts",
icon="fa-folder-open-o",
category="Contacts",
category_icon="fa-envelope",
)
appbuilder.add_view(
TagsModelView,
"List Tags",
icon="fa-folder-open-o",
category="Contacts",
category_icon="fa-envelope",
)
appbuilder.add_separator("Contacts")
appbuilder.add_view(
ContactChartView, "Contacts Chart", icon="fa-dashboard", category="Contacts"
)
appbuilder.add_view(
ContactTimeChartView,
"Contacts Birth Chart",
icon="fa-dashboard",
category="Contacts",
)
appbuilder.security_cleanup()
fill_gender() | null |
231 | import subprocess
from error_codes import *
from errors import error_info
from helpers import geninfo_lookup, find_dce
SSL_CMD = "echo | openssl s_client -connect {0}:443 -brief"
CURL_CMD = "curl -s -S -k https://{0}/ping"
GLOBAL_HANDLER_URL = "global.handler.control.monitor.azure.com"
REGION_HANDLER_URL = "{0}.handler.control.monitor.azure.com"
ODS_URL = "{0}.ods.opinsights.azure.com"
ME_URL = "management.azure.com"
ME_REGION_URL = "{0}.monitoring.azure.com"
def check_endpt_ssl(ssl_cmd, endpoint):
"""
openssl connect to specific endpoint
"""
try:
ssl_output = subprocess.check_output(ssl_cmd.format(endpoint), shell=True,\
stderr=subprocess.STDOUT, universal_newlines=True)
ssl_output_lines = ssl_output.split('\n')
(connected, verified) = (False, False)
for line in ssl_output_lines:
if (line == "CONNECTION ESTABLISHED"):
connected = True
continue
if (line == "Verification: OK"):
verified = True
continue
return (connected, verified, ssl_output)
except Exception as e:
return (False, False, e)
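# For example (a sketch): a reachable endpoint whose certificate chain cannot
# be validated yields (True, False, <openssl output>), which the callers
# below treat differently from a plain connection failure:
#
#     connected, verified, output = check_endpt_ssl(SSL_CMD, "docs.microsoft.com")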
def METHOD_NAME():
"""
check general internet connectivity
"""
(connected_docs, verified_docs, e) = check_endpt_ssl(SSL_CMD, "docs.microsoft.com")
if (connected_docs and verified_docs):
return NO_ERROR
elif (connected_docs and not verified_docs):
error_info.append((SSL_CMD.format("docs.microsoft.com"),))
return WARN_INTERNET
else:
error_info.append((SSL_CMD.format("docs.microsoft.com"),))
return WARN_INTERNET_CONN
def resolve_ip(endpoint):
try:
result = subprocess.call(['nslookup', endpoint], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        if result != 0:
return False, "nslookup {0}".format(endpoint)
else:
return (True, None)
except Exception as e:
return (False, e)
def check_endpt_curl(endpoint):
command = CURL_CMD.format(endpoint)
try:
# check proxy
proxy = geninfo_lookup('MDSD_PROXY_ADDRESS')
username = geninfo_lookup('MDSD_PROXY_USERNAME')
        if proxy is not None:
            command = command + ' -x {0}'.format(proxy)
            if username is not None:
                password = geninfo_lookup('MDSD_PROXY_PASSWORD')
                command = command + ' -U {0}:{1}'.format(username, password)
output = subprocess.check_output(command, shell=True,\
stderr=subprocess.STDOUT, universal_newlines=True)
if output == "Healthy":
return NO_ERROR
        else:
            error_info.append((endpoint, command, output))
            return ERR_ENDPT if proxy is None else ERR_ENDPT_PROXY
except Exception as e:
error_info.append((endpoint, command, e))
return ERR_ENDPT
def check_ama_endpts():
# compose URLs to check
endpoints = [GLOBAL_HANDLER_URL]
regions = geninfo_lookup('DCR_REGION')
workspace_ids = geninfo_lookup('DCR_WORKSPACE_ID')
    if regions is None or workspace_ids is None:
return ERR_INFO_MISSING
for region in regions:
endpoints.append(REGION_HANDLER_URL.format(region))
for id in workspace_ids:
endpoints.append(ODS_URL.format(id))
    me_regions = geninfo_lookup('ME_REGION')
    if me_regions is not None:
        endpoints.append(ME_URL)
        for me_region in me_regions:
            endpoints.append(ME_REGION_URL.format(me_region))
    # modify URLs if URL suffix is .us (Azure Government) or .cn (Azure China)
    url_suffix = geninfo_lookup('URL_SUFFIX')
    if url_suffix != '.com':
        # str.replace() returns a new string, so rebuild the list rather than
        # discarding the results.
        endpoints = [endpoint.replace('.com', url_suffix) for endpoint in endpoints]
    dce, e = find_dce()
    if e is not None:
        error_info.append((e,))
        return ERR_DCE
for endpoint in dce:
endpoints.append(endpoint)
for endpoint in endpoints:
# check if IP address can be resolved using nslookup
resolved, e = resolve_ip(endpoint)
if not resolved:
error_info.append((endpoint,e))
return ERR_RESOLVE_IP
# check ssl handshake
        command = SSL_CMD
        # skip openssl check with authenticated proxy
        if geninfo_lookup('MDSD_PROXY_USERNAME') is not None:
            return WARN_OPENSSL_PROXY
        proxy = geninfo_lookup('MDSD_PROXY_ADDRESS')
        if proxy is not None:
            proxy = proxy.replace('http://', '')
            command = command + ' -proxy {0}'.format(proxy)
        if geninfo_lookup('SSL_CERT_DIR') is not None:
            command = command + " -CApath " + geninfo_lookup('SSL_CERT_DIR')
        if geninfo_lookup('SSL_CERT_FILE') is not None:
            command = command + " -CAfile " + geninfo_lookup('SSL_CERT_FILE')
(connected, verified, e) = check_endpt_ssl(command, endpoint)
if not connected or not verified:
error_info.append((endpoint, command.format(endpoint), e))
return ERR_ENDPT
# check AMCS ping results
if "handler.control.monitor" in endpoint:
checked_curl = check_endpt_curl(endpoint)
if checked_curl != NO_ERROR:
return checked_curl
    return NO_ERROR
232 | """
Use nextclade QC to produce a list of sequences to be excluded.
"""
import argparse
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
def isfloat(value):
try:
float(value)
return True
except ValueError:
return False
def METHOD_NAME(x, minus_weeks=0):
    try:
        return (datetime.strptime(x, "%Y-%m-%d") - timedelta(weeks=minus_weeks)).toordinal()
    except Exception:
        return np.nan
def earliest_clade_date(Nextstrain_clade, clade_emergence_dates_filename, window_weeks=2):
    clade_dates = pd.read_csv(clade_emergence_dates_filename, index_col="Nextstrain_clade", sep='\t')
    try:
        return METHOD_NAME(clade_dates.loc[Nextstrain_clade]['first_sequence'], minus_weeks=window_weeks)
    except Exception:
        return np.nan
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="check sequences for anomalies",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("--metadata", type=str, required=True, help="metadata")
parser.add_argument("--clade_emergence_dates", type=str, default="defaults/clade_emergence_dates.tsv", help="tsv file with two columns: Nextstrain_clade name and first known sequence for that clade.")
parser.add_argument("--clock-filter-recent", type=float, default=20, help="maximal allowed deviation from the molecular clock")
parser.add_argument("--clock-filter", type=float, default=15, help="maximal allowed deviation from the molecular clock")
parser.add_argument("--snp-clusters", type=int, default=1, help="maximal allowed SNP clusters (as defined by nextclade)")
parser.add_argument("--contamination", type=int, default=7, help="maximal allowed putative contamination (labeled + reversion) mutations as defined by nextclade")
parser.add_argument("--clade-emergence-window", type=int, default=2, help="number of weeks before official emergence of clade at which sequences can safely be excluded")
parser.add_argument("--skip-inputs", type=str, nargs="*", help="names of inputs to skip diagnostics for based on presence of metadata fields named like '{input}' with a value of 'yes'")
parser.add_argument("--output-exclusion-list", type=str, required=True, help="Output to-be-reviewed addition to exclude.txt")
args = parser.parse_args()
metadata = pd.read_csv(args.metadata, sep='\t')
# If any inputs should be skipped for diagnostics, remove their records from
# metadata prior to analysis.
if args.skip_inputs:
for input_name in args.skip_inputs:
if input_name in metadata.columns:
metadata = metadata.loc[metadata[input_name] != "yes"].copy()
check_recency = "date_submitted" in metadata
if check_recency:
recency_cutoff = (datetime.today() - timedelta(weeks=4)).toordinal()
recent_sequences = metadata.date_submitted.apply(lambda x: METHOD_NAME(x)>recency_cutoff)
else:
print("Skipping QC steps which rely on submission recency, as metadata is missing 'date_submitted'")
check_clade_dates = "Nextstrain_clade" in metadata
#auto exclude sequences N weeks before their clade emergence
if check_clade_dates:
dates = metadata.date.apply(lambda x: METHOD_NAME(x))
clade_dates = metadata.Nextstrain_clade.apply(lambda x: earliest_clade_date(x, args.clade_emergence_dates, window_weeks=args.clade_emergence_window))
else:
print("Skipping QC steps which rely on clade-date combinations, as metadata is missing 'Nextstrain_clade'")
if "clock_deviation" in metadata.columns:
clock_deviation = np.array([float(x) if isfloat(x) else np.nan for x in metadata.clock_deviation])
else:
clock_deviation = np.zeros(len(metadata), dtype=bool)
if "reversion_mutations" in metadata.columns:
reversion_mutations = np.array([float(x) if isfloat(x) else np.nan for x in metadata.reversion_mutations])
else:
reversion_mutations = np.zeros(len(metadata), dtype=bool)
if "potential_contaminants" in metadata.columns:
contaminants = np.array([float(x) if isfloat(x) else np.nan for x in metadata.potential_contaminants])
else:
contaminants = np.zeros(len(metadata), dtype=bool)
if "snp_clusters" in metadata.columns:
snp_clusters = np.array([float(x) if isfloat(x) else np.nan for x in metadata.snp_clusters])
else:
snp_clusters = np.zeros(len(metadata), dtype=bool)
to_exclude = np.zeros_like(clock_deviation, dtype=bool)
to_exclude |= (reversion_mutations+contaminants>args.contamination)
if check_recency:
to_exclude |= np.abs(clock_deviation)>args.clock_filter_recent
to_exclude |= (np.abs(clock_deviation)>args.clock_filter)&(~recent_sequences)
else:
to_exclude |= np.abs(clock_deviation)>args.clock_filter
if check_clade_dates:
to_exclude |= dates<clade_dates
to_exclude |= snp_clusters>args.snp_clusters
if "QC_mixed_sites" in metadata.columns:
to_exclude |= metadata.QC_mixed_sites=='bad'
# write out file with sequences flagged for exclusion
with open(args.output_exclusion_list, 'w') as excl:
for s in metadata.loc[to_exclude,'strain']:
excl.write(f'{s}\n') | null |
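# Example invocation (a sketch; the script and file names are placeholders):
#
#     python3 diagnostic.py \
#         --metadata results/metadata.tsv \
#         --clade_emergence_dates defaults/clade_emergence_dates.tsv \
#         --clock-filter 15 --clock-filter-recent 20 \
#         --output-exclusion-list results/to-exclude.txt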
233 | # Copyright (c) ZenML GmbH 2022. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Amazon S3 artifact store flavor."""
import json
import re
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Set,
Type,
Union,
)
from pydantic import validator
from zenml.artifact_stores import (
BaseArtifactStoreConfig,
BaseArtifactStoreFlavor,
)
from zenml.integrations.s3 import S3_ARTIFACT_STORE_FLAVOR
from zenml.models import ServiceConnectorRequirements
from zenml.stack.authentication_mixin import AuthenticationConfigMixin
from zenml.utils.networking_utils import (
replace_localhost_with_internal_hostname,
)
from zenml.utils.secret_utils import SecretField
if TYPE_CHECKING:
from zenml.integrations.s3.artifact_stores import S3ArtifactStore
class S3ArtifactStoreConfig(
BaseArtifactStoreConfig, AuthenticationConfigMixin
):
"""Configuration for the S3 Artifact Store.
All attributes of this class except `path` will be passed to the
`s3fs.S3FileSystem` initialization. See
[here](https://s3fs.readthedocs.io/en/latest/) for more information on how
to use those configuration options to connect to any S3-compatible storage.
When you want to register an S3ArtifactStore from the CLI and need to pass
`client_kwargs`, `config_kwargs` or `s3_additional_kwargs`, you should pass
them as a json string:
```
zenml artifact-store register my_s3_store --flavor=s3 \
--path=s3://my_bucket --client_kwargs='{"endpoint_url": "http://my-s3-endpoint"}'
```
"""
SUPPORTED_SCHEMES: ClassVar[Set[str]] = {"s3://"}
key: Optional[str] = SecretField()
secret: Optional[str] = SecretField()
token: Optional[str] = SecretField()
client_kwargs: Optional[Dict[str, Any]] = None
config_kwargs: Optional[Dict[str, Any]] = None
s3_additional_kwargs: Optional[Dict[str, Any]] = None
@validator(
"client_kwargs", "config_kwargs", "s3_additional_kwargs", pre=True
)
def _convert_json_string(
cls, value: Union[None, str, Dict[str, Any]]
) -> Optional[Dict[str, Any]]:
"""Converts potential JSON strings passed via the CLI to dictionaries.
Args:
value: The value to convert.
Returns:
The converted value.
Raises:
TypeError: If the value is not a `str`, `Dict` or `None`.
ValueError: If the value is an invalid json string or a json string
that does not decode into a dictionary.
"""
if isinstance(value, str):
try:
dict_ = json.loads(value)
except json.JSONDecodeError as e:
raise ValueError(f"Invalid json string '{value}'") from e
if not isinstance(dict_, Dict):
raise ValueError(
f"Json string '{value}' did not decode into a dictionary."
)
return dict_
elif isinstance(value, Dict) or value is None:
return value
else:
raise TypeError(f"{value} is not a json string or a dictionary.")
@validator("client_kwargs")
def _validate_client_kwargs(
cls, value: Optional[Dict[str, Any]]
) -> Optional[Dict[str, Any]]:
"""Validates the `client_kwargs` attribute.
Args:
value: The value to validate.
Raises:
ValueError: If the value is not a valid URL.
Returns:
The validated value.
"""
if value is None:
return value
if "endpoint_url" in value and value["endpoint_url"]:
url = value["endpoint_url"].rstrip("/")
scheme = re.search("^([a-z0-9]+://)", url)
if scheme is None or scheme.group() not in ("https://", "http://"):
                raise ValueError(
                    f"Invalid URL for endpoint url: {url}. Should be in the "
                    "form https://hostname[:port] or http://hostname[:port]."
                )
# When running inside a container, if the URL uses localhost, the
# target service will not be available. We try to replace localhost
# with one of the special Docker or K3D internal hostnames.
value["endpoint_url"] = replace_localhost_with_internal_hostname(
url
)
return value
class S3ArtifactStoreFlavor(BaseArtifactStoreFlavor):
"""Flavor of the S3 artifact store."""
@property
def name(self) -> str:
"""Name of the flavor.
Returns:
The name of the flavor.
"""
return S3_ARTIFACT_STORE_FLAVOR
@property
def service_connector_requirements(
self,
) -> Optional[ServiceConnectorRequirements]:
"""Service connector resource requirements for service connectors.
Specifies resource requirements that are used to filter the available
service connector types that are compatible with this flavor.
Returns:
Requirements for compatible service connectors, if a service
connector is required for this flavor.
"""
return ServiceConnectorRequirements(
resource_type="s3-bucket",
resource_id_attr="path",
)
@property
def docs_url(self) -> Optional[str]:
"""A url to point at docs explaining this flavor.
Returns:
A flavor docs url.
"""
return self.generate_default_docs_url()
@property
def sdk_docs_url(self) -> Optional[str]:
"""A url to point at SDK docs explaining this flavor.
Returns:
A flavor SDK docs url.
"""
return self.generate_default_sdk_docs_url()
@property
def logo_url(self) -> str:
"""A url to represent the flavor in the dashboard.
Returns:
The flavor logo.
"""
return "https://public-flavor-logos.s3.eu-central-1.amazonaws.com/artifact_store/aws.png"
@property
def METHOD_NAME(self) -> Type[S3ArtifactStoreConfig]:
"""The config class of the flavor.
Returns:
The config class of the flavor.
"""
return S3ArtifactStoreConfig
@property
def implementation_class(self) -> Type["S3ArtifactStore"]:
"""Implementation class for this flavor.
Returns:
The implementation class for this flavor.
"""
from zenml.integrations.s3.artifact_stores import S3ArtifactStore
return S3ArtifactStore | null |
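# A minimal configuration sketch (not from the original source; the bucket
# and endpoint are placeholders). Note that `client_kwargs` may be given as
# a JSON string, which the validator above converts to a dict:
#
#     config = S3ArtifactStoreConfig(
#         path="s3://my-bucket",
#         client_kwargs='{"endpoint_url": "http://minio:9000"}',
#     )
#     assert config.client_kwargs == {"endpoint_url": "http://minio:9000"}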
234 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkunimkt.endpoint import endpoint_data
class ListSlotRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'UniMkt', '2018-12-12', 'ListSlot')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AdSlotType(self): # String
return self.get_query_params().get('AdSlotType')
def set_AdSlotType(self, AdSlotType): # String
self.add_query_param('AdSlotType', AdSlotType)
def get_UserId(self): # String
return self.get_query_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_query_param('UserId', UserId)
def get_OriginSiteUserId(self): # String
return self.get_query_params().get('OriginSiteUserId')
def set_OriginSiteUserId(self, OriginSiteUserId): # String
self.add_query_param('OriginSiteUserId', OriginSiteUserId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_MediaName(self): # String
return self.get_query_params().get('MediaName')
def set_MediaName(self, MediaName): # String
self.add_query_param('MediaName', MediaName)
def get_AppName(self): # String
return self.get_query_params().get('AppName')
def set_AppName(self, AppName): # String
self.add_query_param('AppName', AppName)
def get_AdSlotStatus(self): # String
return self.get_query_params().get('AdSlotStatus')
def set_AdSlotStatus(self, AdSlotStatus): # String
self.add_query_param('AdSlotStatus', AdSlotStatus)
def get_TenantId(self): # String
return self.get_query_params().get('TenantId')
def set_TenantId(self, TenantId): # String
self.add_query_param('TenantId', TenantId)
def get_AdSlotId(self): # String
return self.get_query_params().get('AdSlotId')
def set_AdSlotId(self, AdSlotId): # String
self.add_query_param('AdSlotId', AdSlotId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_AdSlotCorporateStatus(self): # String
return self.get_query_params().get('AdSlotCorporateStatus')
def set_AdSlotCorporateStatus(self, AdSlotCorporateStatus): # String
self.add_query_param('AdSlotCorporateStatus', AdSlotCorporateStatus)
def get_EndCreateTime(self): # Long
return self.get_query_params().get('EndCreateTime')
def set_EndCreateTime(self, EndCreateTime): # Long
self.add_query_param('EndCreateTime', EndCreateTime)
def get_Business(self): # String
return self.get_query_params().get('Business')
def set_Business(self, Business): # String
self.add_query_param('Business', Business)
def get_MediaId(self): # String
return self.get_query_params().get('MediaId')
def set_MediaId(self, MediaId): # String
self.add_query_param('MediaId', MediaId)
def METHOD_NAME(self): # String
return self.get_query_params().get('Environment')
def set_Environment(self, Environment): # String
self.add_query_param('Environment', Environment)
def get_StartCreateTime(self): # Long
return self.get_query_params().get('StartCreateTime')
def set_StartCreateTime(self, StartCreateTime): # Long
self.add_query_param('StartCreateTime', StartCreateTime)
def get_UserSite(self): # String
return self.get_query_params().get('UserSite')
def set_UserSite(self, UserSite): # String
self.add_query_param('UserSite', UserSite)
def get_AdSlotName(self): # String
return self.get_query_params().get('AdSlotName')
def set_AdSlotName(self, AdSlotName): # String
self.add_query_param('AdSlotName', AdSlotName) | null |
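# Hypothetical usage sketch (not part of the generated SDK file): populate the
# request and inspect the query parameters it will send; a configured
# aliyunsdkcore AcsClient would then execute it via do_action_with_exception.
if __name__ == "__main__":
    _req = ListSlotRequest()
    _req.set_PageNumber(1)
    _req.set_PageSize(20)
    _req.set_AdSlotStatus("ONLINE")  # "ONLINE" is an assumed example value
    assert _req.get_query_params()['PageSize'] == 20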
235 | """Constructors for concrete tool and input source objects."""
import logging
from typing import (
Callable,
Dict,
List,
Optional,
)
from yaml import safe_load
from galaxy.tool_util.loader import load_tool_with_refereces
from galaxy.util import (
ElementTree,
parse_xml_string_to_etree,
)
from galaxy.util.yaml_util import ordered_load
from .cwl import (
CwlToolSource,
tool_proxy,
)
from .interface import (
InputSource,
ToolSource,
)
from .xml import (
XmlInputSource,
XmlToolSource,
)
from .yaml import (
YamlInputSource,
YamlToolSource,
)
from ..fetcher import ToolLocationFetcher
log = logging.getLogger(__name__)
def build_xml_tool_source(xml_string: str) -> XmlToolSource:
return XmlToolSource(parse_xml_string_to_etree(xml_string))
def build_cwl_tool_source(yaml_string: str) -> CwlToolSource:
proxy = tool_proxy(tool_object=safe_load(yaml_string))
    # The regular CwlToolSource derives its tool id from the file basename,
    # which does not exist for a raw string, so hand over the proxy explicitly.
return CwlToolSource(tool_proxy=proxy)
def build_yaml_tool_source(yaml_string: str) -> YamlToolSource:
return YamlToolSource(safe_load(yaml_string))
TOOL_SOURCE_FACTORIES: Dict[str, Callable[[str], ToolSource]] = {
"XmlToolSource": build_xml_tool_source,
"YamlToolSource": build_yaml_tool_source,
"CwlToolSource": build_cwl_tool_source,
}
def get_tool_source(
config_file: Optional[str] = None,
xml_tree: Optional[ElementTree] = None,
enable_beta_formats: bool = True,
tool_location_fetcher: Optional[ToolLocationFetcher] = None,
macro_paths: Optional[List[str]] = None,
tool_source_class: Optional[str] = None,
raw_tool_source: Optional[str] = None,
) -> ToolSource:
"""Return a ToolSource object corresponding to supplied source.
The supplied source may be specified as a file path (using the config_file
parameter) or as an XML object loaded with load_tool_with_refereces.
"""
if xml_tree is not None:
return XmlToolSource(xml_tree, source_path=config_file, macro_paths=macro_paths)
elif config_file is None and raw_tool_source is None:
raise ValueError("get_tool_source called with invalid config_file None.")
if tool_source_class and raw_tool_source:
factory = TOOL_SOURCE_FACTORIES[tool_source_class]
return factory(raw_tool_source)
if tool_location_fetcher is None:
tool_location_fetcher = ToolLocationFetcher()
assert config_file
config_file = tool_location_fetcher.to_tool_path(config_file)
if not enable_beta_formats:
tree, macro_paths = load_tool_with_refereces(config_file)
return XmlToolSource(tree, source_path=config_file, macro_paths=macro_paths)
if config_file.endswith(".yml"):
log.info("Loading tool from YAML - this is experimental - tool will not function in future.")
with open(config_file) as f:
as_dict = ordered_load(f)
return YamlToolSource(as_dict, source_path=config_file)
elif config_file.endswith(".json") or config_file.endswith(".cwl"):
log.info(
"Loading CWL tool - this is experimental - tool likely will not function in future at least in same way."
)
return CwlToolSource(config_file)
else:
tree, macro_paths = load_tool_with_refereces(config_file)
return XmlToolSource(tree, source_path=config_file, macro_paths=macro_paths)
def get_tool_source_from_representation(tool_format, tool_representation):
# TODO: make sure whatever is consuming this method uses ordered load.
log.info("Loading dynamic tool - this is experimental - tool may not function in future.")
if tool_format == "GalaxyTool":
if "version" not in tool_representation:
tool_representation["version"] = "1.0.0" # Don't require version for embedded tools.
return YamlToolSource(tool_representation)
else:
raise Exception(f"Unknown tool representation format [{tool_format}].")
def METHOD_NAME(content):
"""Wrap dicts or XML elements as InputSource if needed.
If the supplied content is already an InputSource object,
    it is simply returned. This allows Galaxy to uniformly
    consume inputs through the tool input source interface.
"""
if not isinstance(content, InputSource):
if isinstance(content, dict):
content = YamlInputSource(content)
else:
content = XmlInputSource(content)
return content
__all__ = ("get_tool_source", "get_input_source") | null |
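# Minimal usage sketch (hypothetical tool text): the factory table above lets
# callers pass raw tool source plus its class name instead of a file path.
if __name__ == "__main__":
    example = get_tool_source(
        tool_source_class="YamlToolSource",
        raw_tool_source="id: example_tool\nversion: '1.0.0'\n",
    )
    assert isinstance(example, YamlToolSource)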
236 | """Test singularity{,-ce} & apptainer versions."""
from subprocess import check_output # nosec
import cwltool.singularity
from cwltool.singularity import (
get_version,
is_apptainer_1_or_newer,
is_version_2_6,
is_version_3_1_or_newer,
is_version_3_4_or_newer,
is_version_3_or_newer,
)
def reset_singularity_version_cache() -> None:
"""Reset the cache for testing."""
cwltool.singularity._SINGULARITY_VERSION = None
cwltool.singularity._SINGULARITY_FLAVOR = ""
def METHOD_NAME(name: str, version: str) -> None:
"""Mock out subprocess.check_output."""
cwltool.singularity.check_output = ( # type: ignore[attr-defined]
lambda c, text: name + " version " + version
)
def restore_check_output() -> None:
"""Undo the mock of subprocess.check_output."""
cwltool.singularity.check_output = check_output # type: ignore[attr-defined]
def test_get_version() -> None:
"""Confirm expected types of singularity.get_version()."""
METHOD_NAME("apptainer", "1.0.1")
reset_singularity_version_cache()
v = get_version()
assert isinstance(v, tuple)
assert isinstance(v[0], list)
assert isinstance(v[1], str)
assert cwltool.singularity._SINGULARITY_VERSION is not None # pylint: disable=protected-access
assert len(cwltool.singularity._SINGULARITY_FLAVOR) > 0 # pylint: disable=protected-access
v_cached = get_version()
assert v == v_cached
assert v[0][0] == 1
assert v[0][1] == 0
assert v[0][2] == 1
assert v[1] == "apptainer"
METHOD_NAME("singularity", "3.8.5")
reset_singularity_version_cache()
v = get_version()
assert v[0][0] == 3
assert v[0][1] == 8
assert v[0][2] == 5
assert v[1] == "singularity"
restore_check_output()
def test_version_checks() -> None:
"""Confirm logic in the various singularity version checks."""
METHOD_NAME("apptainer", "1.0.1")
reset_singularity_version_cache()
assert is_apptainer_1_or_newer()
assert not is_version_2_6()
assert is_version_3_or_newer()
assert is_version_3_1_or_newer()
assert is_version_3_4_or_newer()
METHOD_NAME("apptainer", "0.0.1")
reset_singularity_version_cache()
assert not is_apptainer_1_or_newer()
assert not is_version_2_6()
assert not is_version_3_or_newer()
assert not is_version_3_1_or_newer()
assert not is_version_3_4_or_newer()
METHOD_NAME("singularity", "0.0.1")
reset_singularity_version_cache()
assert not is_apptainer_1_or_newer()
assert not is_version_2_6()
assert not is_version_3_or_newer()
assert not is_version_3_1_or_newer()
assert not is_version_3_4_or_newer()
METHOD_NAME("singularity", "0.1")
reset_singularity_version_cache()
assert not is_apptainer_1_or_newer()
assert not is_version_2_6()
assert not is_version_3_or_newer()
assert not is_version_3_1_or_newer()
assert not is_version_3_4_or_newer()
METHOD_NAME("singularity", "2.6")
reset_singularity_version_cache()
assert not is_apptainer_1_or_newer()
assert is_version_2_6()
assert not is_version_3_or_newer()
assert not is_version_3_1_or_newer()
assert not is_version_3_4_or_newer()
METHOD_NAME("singularity", "3.0")
reset_singularity_version_cache()
assert not is_apptainer_1_or_newer()
assert not is_version_2_6()
assert is_version_3_or_newer()
assert not is_version_3_1_or_newer()
assert not is_version_3_4_or_newer()
METHOD_NAME("singularity", "3.1")
reset_singularity_version_cache()
assert not is_apptainer_1_or_newer()
assert not is_version_2_6()
assert is_version_3_or_newer()
assert is_version_3_1_or_newer()
assert not is_version_3_4_or_newer()
METHOD_NAME("singularity", "3.4")
reset_singularity_version_cache()
assert not is_apptainer_1_or_newer()
assert not is_version_2_6()
assert is_version_3_or_newer()
assert is_version_3_1_or_newer()
assert is_version_3_4_or_newer()
METHOD_NAME("singularity", "3.6.3")
reset_singularity_version_cache()
assert not is_apptainer_1_or_newer()
assert not is_version_2_6()
assert is_version_3_or_newer()
assert is_version_3_1_or_newer()
assert is_version_3_4_or_newer()
restore_check_output() | null |
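# Alternative sketch using pytest's monkeypatch fixture instead of the manual
# patch/restore pair above; it targets the same module attribute and is not
# part of the original suite.
def test_get_version_with_monkeypatch(monkeypatch) -> None:
    """Confirm the flavor parsing using an automatically undone patch."""
    monkeypatch.setattr(
        cwltool.singularity,
        "check_output",
        lambda c, text: "singularity version 3.8.5",
    )
    reset_singularity_version_cache()
    assert get_version()[1] == "singularity"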
237 | import os
import pytest
import mock
import shutil
import tempfile
import xml.etree.ElementTree
from future.moves.urllib.parse import urljoin
from scripts import generate_sitemap
from osf_tests.factories import (AuthUserFactory, ProjectFactory, RegistrationFactory, CollectionFactory,
PreprintFactory, PreprintProviderFactory, EmbargoFactory, UnconfirmedUserFactory)
from website import settings
def get_all_sitemap_urls():
    # Generate the sitemaps (callers patch settings.STATIC_FOLDER to a temporary directory)
generate_sitemap.main()
# Parse the generated XML sitemap file
with open(os.path.join(settings.STATIC_FOLDER, 'sitemaps/sitemap_0.xml')) as f:
tree = xml.etree.ElementTree.parse(f)
shutil.rmtree(settings.STATIC_FOLDER)
# Get all the urls in the sitemap
    # Note: the namespace is declared in the XML file, so it must be included in the tag
namespace = '{http://www.sitemaps.org/schemas/sitemap/0.9}'
urls = [element.text for element in tree.iter(namespace + 'loc')]
return urls
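def _demo_namespaced_loc():
    # Minimal runnable illustration of the namespaced lookup above (inline
    # XML with a hypothetical url; not used by the tests themselves).
    tree = xml.etree.ElementTree.fromstring(
        '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'
        '<url><loc>https://osf.io/abc12/</loc></url></urlset>'
    )
    namespace = '{http://www.sitemaps.org/schemas/sitemap/0.9}'
    return [element.text for element in tree.iter(namespace + 'loc')]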
@pytest.mark.django_db
class TestGenerateSitemap:
@pytest.fixture(autouse=True)
def user_admin_project_public(self):
return AuthUserFactory()
@pytest.fixture(autouse=True)
def user_unconfirmed(self):
return UnconfirmedUserFactory()
@pytest.fixture(autouse=True)
def user_admin_project_private(self):
return AuthUserFactory()
@pytest.fixture(autouse=True)
def project_registration_public(self, user_admin_project_public):
return ProjectFactory(creator=user_admin_project_public, is_public=True)
@pytest.fixture(autouse=True)
def project_preprint_osf(self, user_admin_project_public):
return ProjectFactory(creator=user_admin_project_public, is_public=True)
@pytest.fixture(autouse=True)
def project_preprint_other(self, user_admin_project_public):
return ProjectFactory(creator=user_admin_project_public, is_public=True)
@pytest.fixture(autouse=True)
def project_private(self, user_admin_project_private):
return ProjectFactory(creator=user_admin_project_private, is_public=False)
@pytest.fixture(autouse=True)
def project_deleted(self, user_admin_project_public):
return ProjectFactory(creator=user_admin_project_public, is_deleted=True)
@pytest.fixture(autouse=True)
def registration_active(self, user_admin_project_public, project_registration_public):
return RegistrationFactory(project=project_registration_public,
creator=user_admin_project_public,
is_public=True)
@pytest.fixture(autouse=True)
def registration_embargoed(self, user_admin_project_public, project_registration_public):
return RegistrationFactory(project=project_registration_public,
creator=user_admin_project_public,
embargo=EmbargoFactory(user=user_admin_project_public))
@pytest.fixture(autouse=True)
def collection(self, user_admin_project_public):
return CollectionFactory(creator=user_admin_project_public)
@pytest.fixture(autouse=True)
def METHOD_NAME(self):
        # Note: a provider whose _id == 'osf' has to exist for the script to work
return PreprintProviderFactory(_id='osf', name='osfprovider')
@pytest.fixture(autouse=True)
def provider_other(self):
return PreprintProviderFactory(_id='adl', name='anotherprovider')
@pytest.fixture(autouse=True)
def preprint_osf(self, project_preprint_osf, user_admin_project_public, METHOD_NAME):
return PreprintFactory(project=project_preprint_osf,
creator=user_admin_project_public,
provider=METHOD_NAME)
@pytest.fixture(autouse=True)
def preprint_other(self, project_preprint_other, user_admin_project_public, provider_other):
return PreprintFactory(project=project_preprint_other,
creator=user_admin_project_public,
provider=provider_other)
@pytest.fixture(autouse=True)
def all_included_links(self, user_admin_project_public, user_admin_project_private, project_registration_public,
project_preprint_osf, project_preprint_other,
registration_active, provider_other, preprint_osf,
preprint_other):
# Return urls of all fixtures
urls_to_include = [item['loc'] for item in settings.SITEMAP_STATIC_URLS]
urls_to_include.extend([
user_admin_project_public.url,
user_admin_project_private.url,
project_registration_public.url,
project_preprint_osf.url,
project_preprint_other.url,
registration_active.url,
'/preprints/{}/'.format(preprint_osf._id),
'/preprints/{}/{}/'.format(provider_other._id, preprint_other._id),
'/{}/download/?format=pdf'.format(preprint_osf._id),
'/{}/download/?format=pdf'.format(preprint_other._id)
])
urls_to_include = [urljoin(settings.DOMAIN, item) for item in urls_to_include]
return urls_to_include
@pytest.fixture()
def create_tmp_directory(self):
return tempfile.mkdtemp()
def test_all_links_included(self, all_included_links, create_tmp_directory):
with mock.patch('website.settings.STATIC_FOLDER', create_tmp_directory):
urls = get_all_sitemap_urls()
urls_to_include = all_included_links
assert len(urls_to_include) == len(urls)
assert set(urls_to_include) == set(urls)
def test_unconfirmed_user_not_included(self, create_tmp_directory, user_unconfirmed):
with mock.patch('website.settings.STATIC_FOLDER', create_tmp_directory):
urls = get_all_sitemap_urls()
assert urljoin(settings.DOMAIN, user_unconfirmed.url) not in urls
def test_collection_link_not_included(self, collection, create_tmp_directory):
with mock.patch('website.settings.STATIC_FOLDER', create_tmp_directory):
urls = get_all_sitemap_urls()
assert urljoin(settings.DOMAIN, collection.url) not in urls
def test_private_project_link_not_included(self, project_private, create_tmp_directory):
with mock.patch('website.settings.STATIC_FOLDER', create_tmp_directory):
urls = get_all_sitemap_urls()
assert urljoin(settings.DOMAIN, project_private.url) not in urls
def test_embargoed_registration_link_not_included(self, registration_embargoed, create_tmp_directory):
with mock.patch('website.settings.STATIC_FOLDER', create_tmp_directory):
urls = get_all_sitemap_urls()
assert urljoin(settings.DOMAIN, registration_embargoed.url) not in urls
def test_deleted_project_link_not_included(self, project_deleted, create_tmp_directory):
with mock.patch('website.settings.STATIC_FOLDER', create_tmp_directory):
urls = get_all_sitemap_urls()
assert urljoin(settings.DOMAIN, project_deleted.url) not in urls | null |
238 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class CreateFullNatEntryRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'CreateFullNatEntry','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_FullNatEntryDescription(self): # String
return self.get_query_params().get('FullNatEntryDescription')
def set_FullNatEntryDescription(self, FullNatEntryDescription): # String
self.add_query_param('FullNatEntryDescription', FullNatEntryDescription)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_AccessIp(self): # String
return self.get_query_params().get('AccessIp')
def set_AccessIp(self, AccessIp): # String
self.add_query_param('AccessIp', AccessIp)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_NatIpPort(self): # String
return self.get_query_params().get('NatIpPort')
def set_NatIpPort(self, NatIpPort): # String
self.add_query_param('NatIpPort', NatIpPort)
def get_FullNatTableId(self): # String
return self.get_query_params().get('FullNatTableId')
def set_FullNatTableId(self, FullNatTableId): # String
self.add_query_param('FullNatTableId', FullNatTableId)
def get_AccessPort(self): # String
return self.get_query_params().get('AccessPort')
def set_AccessPort(self, AccessPort): # String
self.add_query_param('AccessPort', AccessPort)
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_IpProtocol(self): # String
return self.get_query_params().get('IpProtocol')
def set_IpProtocol(self, IpProtocol): # String
self.add_query_param('IpProtocol', IpProtocol)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_FullNatEntryName(self): # String
return self.get_query_params().get('FullNatEntryName')
def set_FullNatEntryName(self, FullNatEntryName): # String
self.add_query_param('FullNatEntryName', FullNatEntryName)
def METHOD_NAME(self): # String
return self.get_query_params().get('NatIp')
def set_NatIp(self, NatIp): # String
self.add_query_param('NatIp', NatIp)
def get_NetworkInterfaceId(self): # String
return self.get_query_params().get('NetworkInterfaceId')
def set_NetworkInterfaceId(self, NetworkInterfaceId): # String
self.add_query_param('NetworkInterfaceId', NetworkInterfaceId) | null |
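# Hypothetical usage sketch (placeholder ids and addresses): populate a
# FULLNAT entry request and flag it as a dry run to validate parameters before
# creating anything; a configured AcsClient would execute it.
if __name__ == "__main__":
    _req = CreateFullNatEntryRequest()
    _req.set_FullNatTableId("fulltb-example")
    _req.set_AccessIp("192.168.0.10")
    _req.set_AccessPort("80")
    _req.set_NatIp("10.0.0.5")
    _req.set_NatIpPort("8080")
    _req.set_IpProtocol("TCP")
    _req.set_DryRun(True)
    assert _req.get_DryRun() is True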
239 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkmse.endpoint import endpoint_data
class CreateCircuitBreakerRuleRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'mse', '2019-05-31', 'CreateCircuitBreakerRule','mse')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Threshold(self): # Float
return self.get_query_params().get('Threshold')
def set_Threshold(self, Threshold): # Float
self.add_query_param('Threshold', Threshold)
def get_RetryTimeoutMs(self): # Integer
return self.get_query_params().get('RetryTimeoutMs')
def set_RetryTimeoutMs(self, RetryTimeoutMs): # Integer
self.add_query_param('RetryTimeoutMs', RetryTimeoutMs)
def get_AppName(self): # String
return self.get_query_params().get('AppName')
def set_AppName(self, AppName): # String
self.add_query_param('AppName', AppName)
def get_Enable(self): # Boolean
return self.get_query_params().get('Enable')
def set_Enable(self, Enable): # Boolean
self.add_query_param('Enable', Enable)
def get_MinRequestAmount(self): # Integer
return self.get_query_params().get('MinRequestAmount')
def set_MinRequestAmount(self, MinRequestAmount): # Integer
self.add_query_param('MinRequestAmount', MinRequestAmount)
def get_Resource(self): # String
return self.get_query_params().get('Resource')
def METHOD_NAME(self, Resource): # String
self.add_query_param('Resource', Resource)
def get_MaxAllowedRtMs(self): # Integer
return self.get_query_params().get('MaxAllowedRtMs')
def set_MaxAllowedRtMs(self, MaxAllowedRtMs): # Integer
self.add_query_param('MaxAllowedRtMs', MaxAllowedRtMs)
def get_HalfOpenBaseAmountPerStep(self): # Integer
return self.get_query_params().get('HalfOpenBaseAmountPerStep')
def set_HalfOpenBaseAmountPerStep(self, HalfOpenBaseAmountPerStep): # Integer
self.add_query_param('HalfOpenBaseAmountPerStep', HalfOpenBaseAmountPerStep)
def get_StatIntervalMs(self): # Integer
return self.get_query_params().get('StatIntervalMs')
def set_StatIntervalMs(self, StatIntervalMs): # Integer
self.add_query_param('StatIntervalMs', StatIntervalMs)
def get_AppId(self): # String
return self.get_query_params().get('AppId')
def set_AppId(self, AppId): # String
self.add_query_param('AppId', AppId)
def get_Namespace(self): # String
return self.get_query_params().get('Namespace')
def set_Namespace(self, Namespace): # String
self.add_query_param('Namespace', Namespace)
def get_HalfOpenRecoveryStepNum(self): # Integer
return self.get_query_params().get('HalfOpenRecoveryStepNum')
def set_HalfOpenRecoveryStepNum(self, HalfOpenRecoveryStepNum): # Integer
self.add_query_param('HalfOpenRecoveryStepNum', HalfOpenRecoveryStepNum)
def get_AcceptLanguage(self): # String
return self.get_query_params().get('AcceptLanguage')
def set_AcceptLanguage(self, AcceptLanguage): # String
self.add_query_param('AcceptLanguage', AcceptLanguage)
def get_Strategy(self): # Integer
return self.get_query_params().get('Strategy')
def set_Strategy(self, Strategy): # Integer
self.add_query_param('Strategy', Strategy) | null |
240 | from lm_eval.utils import get_rolling_token_windows, make_disjoint_window
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v1():
gold = [
([-100, 0, 1, 2, 3, 4, 5, 6, 7, 8], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),
(
[9, 10, 11, 12, 13, 14, 15, 16, 17, 18],
[10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
),
(
[19, 20, 21, 22, 23, 24, 25, 26, 27, 28],
[20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
),
([23, 24, 25, 26, 27, 28, 29, 30, 31, 32], [30, 31, 32, 33]),
]
x = list(range(34))
generator = get_rolling_token_windows(
token_list=x,
prefix_token=-100,
max_seq_len=10,
context_len=1,
)
pred_length = 0
output = []
for input_tokens, pred_tokens in generator:
output.append((input_tokens, pred_tokens))
pred_length += len(pred_tokens)
assert pred_length == len(x)
assert gold == output
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v2():
gold = [
([-100, 0, 1, 2, 3, 4, 5, 6, 7, 8], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),
([2, 3, 4, 5, 6, 7, 8, 9, 10, 11], [10, 11, 12]),
([5, 6, 7, 8, 9, 10, 11, 12, 13, 14], [13, 14, 15]),
([8, 9, 10, 11, 12, 13, 14, 15, 16, 17], [16, 17, 18]),
([11, 12, 13, 14, 15, 16, 17, 18, 19, 20], [19, 20, 21]),
([14, 15, 16, 17, 18, 19, 20, 21, 22, 23], [22, 23, 24]),
([17, 18, 19, 20, 21, 22, 23, 24, 25, 26], [25, 26, 27]),
([20, 21, 22, 23, 24, 25, 26, 27, 28, 29], [28, 29, 30]),
([23, 24, 25, 26, 27, 28, 29, 30, 31, 32], [31, 32, 33]),
]
x = list(range(34))
generator = get_rolling_token_windows(
token_list=x,
prefix_token=-100,
max_seq_len=10,
context_len=8,
)
pred_length = 0
output = []
for input_tokens, pred_tokens in generator:
output.append((input_tokens, pred_tokens))
pred_length += len(pred_tokens)
assert pred_length == len(x)
assert gold == output
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v3():
gold = [
([-100, 0, 1, 2, 3, 4, 5, 6, 7, 8], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),
([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [10]),
([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], [11]),
([2, 3, 4, 5, 6, 7, 8, 9, 10, 11], [12]),
([3, 4, 5, 6, 7, 8, 9, 10, 11, 12], [13]),
([4, 5, 6, 7, 8, 9, 10, 11, 12, 13], [14]),
([5, 6, 7, 8, 9, 10, 11, 12, 13, 14], [15]),
([6, 7, 8, 9, 10, 11, 12, 13, 14, 15], [16]),
([7, 8, 9, 10, 11, 12, 13, 14, 15, 16], [17]),
([8, 9, 10, 11, 12, 13, 14, 15, 16, 17], [18]),
([9, 10, 11, 12, 13, 14, 15, 16, 17, 18], [19]),
([10, 11, 12, 13, 14, 15, 16, 17, 18, 19], [20]),
([11, 12, 13, 14, 15, 16, 17, 18, 19, 20], [21]),
([12, 13, 14, 15, 16, 17, 18, 19, 20, 21], [22]),
([13, 14, 15, 16, 17, 18, 19, 20, 21, 22], [23]),
([14, 15, 16, 17, 18, 19, 20, 21, 22, 23], [24]),
([15, 16, 17, 18, 19, 20, 21, 22, 23, 24], [25]),
([16, 17, 18, 19, 20, 21, 22, 23, 24, 25], [26]),
([17, 18, 19, 20, 21, 22, 23, 24, 25, 26], [27]),
([18, 19, 20, 21, 22, 23, 24, 25, 26, 27], [28]),
([19, 20, 21, 22, 23, 24, 25, 26, 27, 28], [29]),
([20, 21, 22, 23, 24, 25, 26, 27, 28, 29], [30]),
([21, 22, 23, 24, 25, 26, 27, 28, 29, 30], [31]),
([22, 23, 24, 25, 26, 27, 28, 29, 30, 31], [32]),
([23, 24, 25, 26, 27, 28, 29, 30, 31, 32], [33]),
]
x = list(range(34))
generator = get_rolling_token_windows(
token_list=x,
prefix_token=-100,
max_seq_len=10,
context_len=10,
)
pred_length = 0
output = []
for input_tokens, pred_tokens in generator:
output.append((input_tokens, pred_tokens))
pred_length += len(pred_tokens)
assert pred_length == len(x)
assert gold == output
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v4():
gold = [
([-100, 0, 1, 2, 3, 4, 5, 6, 7, 8], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),
([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [10]),
([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], [11]),
([2, 3, 4, 5, 6, 7, 8, 9, 10, 11], [12]),
([3, 4, 5, 6, 7, 8, 9, 10, 11, 12], [13]),
([4, 5, 6, 7, 8, 9, 10, 11, 12, 13], [14]),
([5, 6, 7, 8, 9, 10, 11, 12, 13, 14], [15]),
([6, 7, 8, 9, 10, 11, 12, 13, 14, 15], [16]),
([7, 8, 9, 10, 11, 12, 13, 14, 15, 16], [17]),
([8, 9, 10, 11, 12, 13, 14, 15, 16, 17], [18]),
([9, 10, 11, 12, 13, 14, 15, 16, 17, 18], [19]),
([10, 11, 12, 13, 14, 15, 16, 17, 18, 19], [20]),
([11, 12, 13, 14, 15, 16, 17, 18, 19, 20], [21]),
([12, 13, 14, 15, 16, 17, 18, 19, 20, 21], [22]),
([13, 14, 15, 16, 17, 18, 19, 20, 21, 22], [23]),
([14, 15, 16, 17, 18, 19, 20, 21, 22, 23], [24]),
([15, 16, 17, 18, 19, 20, 21, 22, 23, 24], [25]),
([16, 17, 18, 19, 20, 21, 22, 23, 24, 25], [26]),
([17, 18, 19, 20, 21, 22, 23, 24, 25, 26], [27]),
([18, 19, 20, 21, 22, 23, 24, 25, 26, 27], [28]),
([19, 20, 21, 22, 23, 24, 25, 26, 27, 28], [29]),
]
x = list(range(30))
generator = get_rolling_token_windows(
token_list=x,
prefix_token=-100,
max_seq_len=10,
context_len=10,
)
pred_length = 0
output = []
for input_tokens, pred_tokens in generator:
output.append((input_tokens, pred_tokens))
pred_length += len(pred_tokens)
assert pred_length == len(x)
assert gold == output
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v5():
gold = [
([-100, 0, 1, 2, 3, 4, 5, 6, 7, 8], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),
(
[9, 10, 11, 12, 13, 14, 15, 16, 17, 18],
[10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
),
(
[19, 20, 21, 22, 23, 24, 25, 26, 27, 28],
[20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
),
]
x = list(range(30))
generator = get_rolling_token_windows(
token_list=x,
prefix_token=-100,
max_seq_len=10,
context_len=1,
)
pred_length = 0
output = []
for input_tokens, pred_tokens in generator:
output.append((input_tokens, pred_tokens))
pred_length += len(pred_tokens)
assert pred_length == len(x)
assert gold == output
# noinspection DuplicatedCode
def METHOD_NAME():
gold = [
([-100, 0], [0, 1]),
([1, 2], [2, 3]),
([3, 4], [4, 5]),
([5, 6], [6, 7]),
([6, 7], [8]),
]
x = list(range(9))
generator = get_rolling_token_windows(
token_list=x,
prefix_token=-100,
max_seq_len=2,
context_len=1,
)
pred_length = 0
output = []
for input_tokens, pred_tokens in generator:
output.append((input_tokens, pred_tokens))
pred_length += len(pred_tokens)
assert pred_length == len(x)
assert gold == output
def test_get_rolling_token_windows_empty():
generator = get_rolling_token_windows(
token_list=[],
prefix_token=-100,
max_seq_len=2,
context_len=1,
)
n = 0
for _ in generator:
n += 1
assert n == 0
def test_make_disjoint_window():
assert make_disjoint_window(([1, 2, 3, 4, 5], [2, 3, 4, 5, 6])) == (
[1],
[2, 3, 4, 5, 6],
)
assert make_disjoint_window(([1, 2, 3, 4, 5], [4, 5, 6])) == ([1, 2, 3], [4, 5, 6])
assert make_disjoint_window(([1, 2, 3, 4, 5], [6])) == ([1, 2, 3, 4, 5], [6]) | null |
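# Sketch of how the two helpers combine in a rolling-loglikelihood loop
# (the `score` callable is a hypothetical stand-in for a model's per-token
# loglikelihood; this function is not part of the original tests):
def rolling_loglikelihood(tokens, score, max_seq_len=10, context_len=1):
    total = 0.0
    for window in get_rolling_token_windows(
        token_list=tokens,
        prefix_token=-100,
        max_seq_len=max_seq_len,
        context_len=context_len,
    ):
        context, continuation = make_disjoint_window(window)
        total += score(context, continuation)
    return total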
241 | import argparse
import csv
import re
from django.core.management import BaseCommand
from pola.company.models import Brand, Company
from pola.management.command_utils import ask_yes_no
from pola.product.models import Product
def update_product(self, brand, ean_code, company, product_name):
product = Product.objects.filter(code=ean_code).first()
if product:
product.brand = brand
product.name = product_name
product.company = company
product.save()
self.stdout.write(self.style.SUCCESS(f"Successfully updated product {product_name}"))
else:
Product(company=company, brand=brand, code=ean_code, name=product_name).save()
self.stdout.write(self.style.SUCCESS(f"Successfully created product {product_name}"))
def nip_number(value):
if len(value) == 10 and value.isdigit():
return value
raise argparse.ArgumentTypeError(f"Invalid NIP number: '{value}'")
class Command(BaseCommand):
help = 'Import lidl companies from .tsv file'
def add_arguments(self, parser):
parser.add_argument('tsv_filepath', type=argparse.FileType('r'))
parser.add_argument('company_nip', type=nip_number)
parser.add_argument(
'--noinput',
'--no-input',
action='store_false',
dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind. ',
)
def METHOD_NAME(self, *args, **options):
brand_owner = Company.objects.filter(nip__exact=options['company_nip']).first()
if not brand_owner:
self.stdout.write(self.style.ERROR(f'Company with nip {options["company_nip"]} does not exist.'))
return
if options['interactive'] and not ask_yes_no(
f'You selected company: {brand_owner.official_name} with nip: {brand_owner.nip}. Proceed? (Y/n)'
):
self.stdout.write(self.style.ERROR('Operation cancelled.'))
return
with options['tsv_filepath'] as csv_file:
csv_reader = csv.reader(csv_file, delimiter='\t')
index_successful = 0
line_no = 0
for line_no, row in enumerate(csv_reader):
# skip column names
if line_no == 0:
continue
                # Extract the Polish NIP: first comma-separated id, minus the
                # two-letter country prefix (e.g. "PL")
                nip = row[8].split(",")[0][2:]
brand_name = row[2]
ean_codes = re.findall(r'[0-9]+', row[0])
product_name = row[5]
# check if brand exist in db
brand = Brand.objects.filter(company=brand_owner, common_name=brand_name).first()
if not brand:
brand = Brand(company=brand_owner, common_name=brand_name)
brand.save()
self.stdout.write(self.style.SUCCESS(f"Successfully created brand {brand.common_name} "))
# check if company from tsv file exist in db
company = Company.objects.filter(nip__exact=nip).first()
if not company:
self.stdout.write(
self.style.WARNING(
f'Company with nip {nip} does not exist. '
f'Script will create a new company with name: {row[7]} nip: {nip}'
)
)
company = Company(nip=nip, name=row[7])
company.save()
if len(ean_codes) > 1:
self.stdout.write(
self.style.WARNING(
f"Product with name {product_name} has multiple ean codes: "
f"{', '.join(str(x) for x in ean_codes)}. "
f"Script will create/update multiple products, one for each code."
)
)
for code in ean_codes:
# check if product exist in db
update_product(self, brand, code, company, product_name)
index_successful += 1
if line_no == 0:
self.stdout.write(self.style.SUCCESS('Empty file. Nothing to do'))
            else:
                self.stdout.write(self.style.SUCCESS(f'Processed {line_no} products successfully.'))
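# Illustration of the row parsing above (hypothetical row fragments): column 8
# carries comma-separated, country-prefixed VAT ids and column 0 may contain
# several EAN codes in free text.
if __name__ == "__main__":
    assert "PL1234567890,DE9999999999".split(",")[0][2:] == "1234567890"
    assert re.findall(r'[0-9]+', "5901234123457 / 5900000000017") == [
        '5901234123457', '5900000000017'
    ]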
242 | # coding=utf-8
from tests import unittest
from mock import MagicMock, patch, Mock
from aliyunsdkcore.endpoint.location_service_endpoint_resolver \
import LocationServiceEndpointResolver
from aliyunsdkcore.endpoint.resolver_endpoint_request import ResolveEndpointRequest
from aliyunsdkcore.acs_exception.exceptions import ServerException
from aliyunsdkcore.compat import ensure_bytes
class TestLocationServiceEndpointResolver(unittest.TestCase):
def test_location_service_endpoint(self):
resolver = LocationServiceEndpointResolver(None)
self.assertEqual(resolver._location_service_endpoint,
"location-readonly.aliyuncs.com")
resolver.set_location_service_endpoint("new location endpoint")
self.assertEqual(resolver._location_service_endpoint,
"new location endpoint")
def METHOD_NAME(self):
resolver = LocationServiceEndpointResolver(None)
request = ResolveEndpointRequest(
"cn-huhehaote", "ecs", "servicecode", "")
self.assertEqual("ecs.servicecode.cn-huhehaote.openAPI",
resolver.get_endpoint_key_from_request(request))
def test_resolver(self):
resolver = LocationServiceEndpointResolver(None)
# no location_service_code
request = ResolveEndpointRequest("", "", "", "")
self.assertEqual(resolver.resolve(request), None)
# invalid products
resolver._invalid_product_codes.add("invalid_product")
request = ResolveEndpointRequest(
"cn-huhehaote", "invalid_product", "servicecode", "")
self.assertEqual(resolver.resolve(request), None)
# invalid region id
resolver._invalid_region_ids.add("invalid_region_id")
request = ResolveEndpointRequest(
"invalid_region_id", "product", "servicecode", "")
self.assertEqual(resolver.resolve(request), None)
# match cache
request = ResolveEndpointRequest(
"region_id", "product", "servicecode", "")
resolver.endpoints_data["product.servicecode.region_id.openAPI"] = "the fake endpoint"
self.assertEqual(resolver.resolve(request), "the fake endpoint")
def test_is_region_id_valid(self):
resolver = LocationServiceEndpointResolver(None)
request = ResolveEndpointRequest(
"region_id", "product", "", "")
self.assertFalse(resolver.is_region_id_valid(request))
resolver._invalid_region_ids.add("invalid_region_id")
request = ResolveEndpointRequest(
"invalid_region_id", "product", "servicecode", "")
self.assertFalse(resolver.is_region_id_valid(request))
def test_is_product_code_valid(self):
resolver = LocationServiceEndpointResolver(None)
request = ResolveEndpointRequest(
"region_id", "product", "", "")
self.assertFalse(resolver.is_product_code_valid(request))
resolver._invalid_product_codes.add("invalid_product")
request = ResolveEndpointRequest(
"region_id", "invalid_product", "servicecode", "")
self.assertFalse(resolver.is_product_code_valid(request))
def test_resolver_with_location(self):
client = Mock()
client.do_action_with_exception.return_value = ensure_bytes(
'{"Code": "Success","Endpoints": {"Endpoint": []}}')
resolver = LocationServiceEndpointResolver(client)
request = ResolveEndpointRequest(
"region_id", "product", "servicecode", "")
self.assertEqual(resolver.resolve(request), None)
def test_resolver_with_location2(self):
client = Mock()
client.do_action_with_exception.return_value = ensure_bytes(
'{"Code": "Success","Endpoints": {"Endpoint": [{"ServiceCode":"servicecode",' +
'"Type":"innerAPI","Endpoint":"the inner endpoint"},{"ServiceCode":"servicecode",' +
'"Type":"openAPI","Endpoint":"the endpoint"}]}}')
resolver = LocationServiceEndpointResolver(client)
request = ResolveEndpointRequest(
"region_id", "product", "servicecode", "")
self.assertEqual(resolver.resolve(request), "the endpoint")
def test_resolver_with_server_exception(self):
client = Mock()
client.do_action_with_exception.side_effect = ServerException(
"OTHER_ERROR_CODE", "msg")
resolver = LocationServiceEndpointResolver(client)
request = ResolveEndpointRequest(
"region_id", "product", "servicecode", "")
with self.assertRaises(ServerException) as ex:
resolver.resolve(request)
self.assertEqual(ex.exception.error_code, "OTHER_ERROR_CODE")
self.assertEqual(
ex.exception.message, "msg")
def test_resolver_with_server_exception_invalid_regionid(self):
client = Mock()
client.do_action_with_exception.side_effect = ServerException(
"InvalidRegionId", "The specified region does not exist.")
resolver = LocationServiceEndpointResolver(client)
request = ResolveEndpointRequest(
"region_id", "product", "servicecode", "")
self.assertEqual(resolver.resolve(request), None)
client.do_action_with_exception.side_effect = ServerException(
"Illegal Parameter", "Please check the parameters")
resolver = LocationServiceEndpointResolver(client)
request = ResolveEndpointRequest(
"region_id", "product", "servicecode", "")
self.assertEqual(resolver.resolve(request), None) | null |
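    def test_manual_cache_priming(self):
        # Hedged extra check (not in the original suite): the cache key layout
        # verified above means a resolver can be primed by hand.
        resolver = LocationServiceEndpointResolver(None)
        resolver.endpoints_data["product.servicecode.region_id.openAPI"] = "primed endpoint"
        request = ResolveEndpointRequest(
            "region_id", "product", "servicecode", "")
        self.assertEqual(resolver.resolve(request), "primed endpoint")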
243 | #
# junitxml: extensions to Python unittest to get output junitxml
# Copyright (C) 2009 Robert Collins <[email protected]>
#
# Copying permitted under the LGPL-3 licence, included with this library.
"""unittest compatible JUnit XML output."""
import datetime
import re
import time
import unittest
# same format as sys.version_info: "A tuple containing the five components of
# the version number: major, minor, micro, releaselevel, and serial. All
# values except releaselevel are integers; the release level is 'alpha',
# 'beta', 'candidate', or 'final'. The version_info value corresponding to the
# Python version 2.0 is (2, 0, 0, 'final', 0)." Additionally we use a
# releaselevel of 'dev' for unreleased under-development code.
#
# If the releaselevel is 'alpha' then the major/minor/micro components are not
# established at this point, and setup.py will use a version of next-$(revno).
# If the releaselevel is 'final', then the tarball will be major.minor.micro.
# Otherwise it is major.minor.micro~$(revno).
__version__ = (0, 7, 0, 'alpha', 0)
def test_suite():
import junitxml.tests
return junitxml.tests.test_suite()
class LocalTimezone(datetime.tzinfo):
def __init__(self):
self._offset = None
# It seems that the minimal possible implementation is to just return all
# None for every function, but then it breaks...
def utcoffset(self, dt):
if self._offset is None:
t = 1260423030 # arbitrary, but doesn't handle dst very well
dt = datetime.datetime
self._offset = (dt.fromtimestamp(t) - dt.utcfromtimestamp(t))
return self._offset
def dst(self, dt):
return datetime.timedelta(0)
def tzname(self, dt):
return None
def _error_name(eclass):
module = eclass.__module__
if module not in ("__main__", "builtins", "exceptions"):
return ".".join([module, eclass.__name__])
return eclass.__name__
_non_cdata = "[\0-\b\x0B-\x1F\uD800-\uDFFF\uFFFE\uFFFF]+"
if "\\u" in _non_cdata:
    # Python 2: \u escapes are not interpreted in plain str literals
    _non_cdata = _non_cdata.decode("unicode-escape")
    def _strip_invalid_chars(s, _sub=re.compile(_non_cdata, re.UNICODE).sub):
        if not isinstance(s, unicode):
            try:
                s = s.decode("utf-8")
            except UnicodeDecodeError:
                s = s.decode("ascii", "replace")
        return _sub("", s).encode("utf-8")
else:
    def _strip_invalid_chars(s, _sub=re.compile(_non_cdata, re.UNICODE).sub):
        return _sub("", s)
def _escape_content(s):
return (_strip_invalid_chars(s)
.replace("&", "&")
.replace("<", "<")
.replace("]]>", "]]>"))
def _escape_attr(s):
return (_strip_invalid_chars(s)
.replace("&", "&")
.replace("<", "<")
.replace("]]>", "]]>")
.replace('"', """)
.replace("\t", "	")
.replace("\n", "
"))
class JUnitXmlResult(unittest.TestResult):
"""A TestResult which outputs JUnit compatible XML."""
def __init__(self, stream):
"""Create a JUnitXmlResult.
        :param stream: A stream to write results to. Note that due to the
            nature of JUnit XML output, nothing will be written to the stream
            until stopTestRun() is called.
"""
self.__super = super(JUnitXmlResult, self)
self.__super.__init__()
# GZ 2010-09-03: We have a problem if passed a text stream in Python 3
# as really we want to write raw UTF-8 to ensure that
# the encoding is not mangled later
self._stream = stream
self._results = []
self._set_time = None
self._test_start = None
self._run_start = None
self._tz_info = None
def startTestRun(self):
"""Start a test run."""
self._run_start = self._now()
def _get_tzinfo(self):
if self._tz_info is None:
self._tz_info = LocalTimezone()
return self._tz_info
def _now(self):
if self._set_time is not None:
return self._set_time
else:
return datetime.datetime.now(self._get_tzinfo())
def time(self, a_datetime):
self._set_time = a_datetime
if (self._run_start is not None and
self._run_start > a_datetime):
self._run_start = a_datetime
def startTest(self, test):
self.__super.startTest(test)
self._test_start = self._now()
def _duration(self, from_datetime):
try:
delta = self._now() - from_datetime
        except TypeError:
            # Subtracting naive from aware datetimes raises; fall back to a
            # sentinel negative duration
            delta = datetime.timedelta(-1)
seconds = delta.days * 3600*24 + delta.seconds
return seconds + 0.000001 * delta.microseconds
def _test_case_string(self, test):
duration = self._duration(self._test_start)
test_id = test.id()
# Split on the last dot not inside a parameter
class_end = test_id.rfind(".", 0, test_id.find("("))
if class_end == -1:
classname, name = "", test_id
else:
classname, name = test_id[:class_end], test_id[class_end+1:]
self._results.append('<testcase classname="%s" name="%s" '
'time="%0.3f"' % (_escape_attr(classname), _escape_attr(name), duration))
def stopTestRun(self):
"""Stop a test run.
This allows JUnitXmlResult to output the XML representation of the test
run.
"""
duration = self._duration(self._run_start)
self._stream.write('<testsuite errors="%d" failures="%d" name="" '
'tests="%d" time="%0.3f">\n' % (len(self.errors),
len(self.failures) + len(getattr(self, "unexpectedSuccesses", ())),
self.testsRun, duration))
self._stream.write(''.join(self._results))
self._stream.write('</testsuite>\n')
def addError(self, test, error):
self.__super.addError(test, error)
self._test_case_string(test)
self._results.append('>\n')
self._results.append('<error type="%s">%s</error>\n</testcase>\n' % (
_escape_attr(_error_name(error[0])),
_escape_content(self._exc_info_to_string(error, test))))
def addFailure(self, test, error):
self.__super.addFailure(test, error)
self._test_case_string(test)
self._results.append('>\n')
self._results.append('<failure type="%s">%s</failure>\n</testcase>\n' %
(_escape_attr(_error_name(error[0])),
_escape_content(self._exc_info_to_string(error, test))))
def addSuccess(self, test):
self.__super.addSuccess(test)
self._test_case_string(test)
self._results.append('/>\n')
def addSkip(self, test, reason):
try:
self.__super.addSkip(test, reason)
except AttributeError:
# Python < 2.7|3.1
pass
self._test_case_string(test)
self._results.append('>\n')
self._results.append('<skip>%s</skip>\n</testcase>\n'% _escape_attr(reason))
def METHOD_NAME(self, test):
try:
self.__super.METHOD_NAME(test)
except AttributeError:
# Python < 2.7|3.1
pass
self._test_case_string(test)
self._results.append('>\n')
self._results.append('<failure type="unittest.case._UnexpectedSuccess"/>\n</testcase>\n')
def addExpectedFailure(self, test, error):
try:
self.__super.addExpectedFailure(test, error)
except AttributeError:
# Python < 2.7|3.1
pass
self._test_case_string(test)
self._results.append('/>\n')
| null |
244 | # License: MIT
# Copyright © 2023 Frequenz Energy-as-a-Service GmbH
"""Tests for the moving window."""
import asyncio
from collections.abc import Iterator, Sequence
from datetime import datetime, timedelta, timezone
import async_solipsism
import numpy as np
import pytest
import time_machine
from frequenz.channels import Broadcast, Sender
from frequenz.sdk.timeseries import UNIX_EPOCH, Sample
from frequenz.sdk.timeseries._moving_window import MovingWindow
from frequenz.sdk.timeseries._quantities import Quantity
from frequenz.sdk.timeseries._resampling import ResamplerConfig
# Setting 'autouse' has no effect as this method replaces the event loop for all tests in the file.
@pytest.fixture()
def event_loop() -> Iterator[async_solipsism.EventLoop]:
"""Replace the loop with one that doesn't interact with the outside world."""
loop = async_solipsism.EventLoop()
yield loop
loop.close()
async def push_logical_meter_data(
sender: Sender[Sample[Quantity]],
test_seq: Sequence[float],
start_ts: datetime = UNIX_EPOCH,
) -> None:
"""Push data in the passed sender to mock `LogicalMeter` behaviour.
Starting with UNIX_EPOCH.
Args:
sender: Sender for pushing resampled samples to the `MovingWindow`.
test_seq: The Sequence that is pushed into the `MovingWindow`.
start_ts: The start timestamp of the `MovingWindow`.
"""
    for j, i in enumerate(test_seq):
timestamp = start_ts + timedelta(seconds=j)
await sender.send(Sample(timestamp, Quantity(float(i))))
await asyncio.sleep(0.0)
def init_moving_window(
size: timedelta,
) -> tuple[MovingWindow, Sender[Sample[Quantity]]]:
"""Initialize the moving window with given shape.
Args:
size: The size of the `MovingWindow`
Returns:
tuple[MovingWindow, Sender[Sample]]: A pair of sender and `MovingWindow`.
"""
lm_chan = Broadcast[Sample[Quantity]]("lm_net_power")
lm_tx = lm_chan.new_sender()
window = MovingWindow(size, lm_chan.new_receiver(), timedelta(seconds=1))
return window, lm_tx
async def test_access_window_by_index() -> None:
"""Test indexing a window by integer index."""
window, sender = init_moving_window(timedelta(seconds=1))
async with window:
await push_logical_meter_data(sender, [1])
assert np.array_equal(window[0], 1.0)
async def test_access_window_by_timestamp() -> None:
"""Test indexing a window by timestamp."""
window, sender = init_moving_window(timedelta(seconds=1))
async with window:
await push_logical_meter_data(sender, [1])
assert np.array_equal(window[UNIX_EPOCH], 1.0)
async def test_access_window_by_int_slice() -> None:
"""Test accessing a subwindow with an integer slice.
    Note that the second assertion overwrites the data of the first, since
    push_logical_meter_data starts from the same initial timestamp.
"""
window, sender = init_moving_window(timedelta(seconds=14))
async with window:
await push_logical_meter_data(sender, range(0, 5))
assert np.array_equal(window[3:5], np.array([3.0, 4.0]))
data = [1, 2, 2.5, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1]
await push_logical_meter_data(sender, data)
assert np.array_equal(window[5:14], np.array(data[5:14]))
async def test_access_window_by_ts_slice() -> None:
"""Test accessing a subwindow with a timestamp slice."""
window, sender = init_moving_window(timedelta(seconds=5))
async with window:
await push_logical_meter_data(sender, range(0, 5))
time_start = UNIX_EPOCH + timedelta(seconds=3)
time_end = time_start + timedelta(seconds=2)
assert np.array_equal(window[time_start:time_end], np.array([3.0, 4.0])) # type: ignore
async def test_access_empty_window() -> None:
"""Test accessing an empty window, should throw IndexError."""
window, _ = init_moving_window(timedelta(seconds=5))
async with window:
with pytest.raises(IndexError, match=r"^The buffer is empty\.$"):
_ = window[42]
async def test_window_size() -> None:
"""Test the size of the window."""
window, sender = init_moving_window(timedelta(seconds=5))
async with window:
assert window.capacity == 5, "Wrong window capacity"
assert len(window) == 0, "Window should be empty"
await push_logical_meter_data(sender, range(0, 2))
assert window.capacity == 5, "Wrong window capacity"
assert len(window) == 2, "Window should be partially full"
await push_logical_meter_data(sender, range(2, 20))
assert window.capacity == 5, "Wrong window capacity"
assert len(window) == 5, "Window should be full"
# pylint: disable=redefined-outer-name
async def test_resampling_window(fake_time: time_machine.Coordinates) -> None:
"""Test resampling in MovingWindow."""
channel = Broadcast[Sample[Quantity]]("net_power")
sender = channel.new_sender()
window_size = timedelta(seconds=16)
input_sampling = timedelta(seconds=1)
output_sampling = timedelta(seconds=2)
resampler_config = ResamplerConfig(resampling_period=output_sampling)
async with MovingWindow(
size=window_size,
resampled_data_recv=channel.new_receiver(),
input_sampling_period=input_sampling,
resampler_config=resampler_config,
) as window:
assert window.capacity == window_size / output_sampling, "Wrong window capacity"
assert len(window) == 0, "Window should be empty at the beginning"
stream_values = [4.0, 8.0, 2.0, 6.0, 5.0] * 100
for value in stream_values:
timestamp = datetime.now(tz=timezone.utc)
sample = Sample(timestamp, Quantity(float(value)))
await sender.send(sample)
await asyncio.sleep(0.1)
fake_time.shift(0.1)
assert len(window) == window_size / output_sampling
for value in window: # type: ignore
assert 4.9 < value < 5.1
async def METHOD_NAME() -> None:
"""Test indexing a window by timestamp."""
window, sender = init_moving_window(timedelta(seconds=5))
async with window:
await push_logical_meter_data(
sender, [1, 2], start_ts=UNIX_EPOCH + timedelta(seconds=1)
)
assert window.oldest_timestamp == UNIX_EPOCH + timedelta(seconds=1)
assert window.newest_timestamp == UNIX_EPOCH + timedelta(seconds=2) | null |
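async def test_window_mean() -> None:
    """Hedged extra check (not in the original suite): aggregate over a slice."""
    window, sender = init_moving_window(timedelta(seconds=3))
    async with window:
        await push_logical_meter_data(sender, [1.0, 2.0, 3.0])
        assert float(np.mean(window[0:3])) == 2.0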
245 | from arm.logicnode.arm_nodes import *
class MathNode(ArmLogicTreeNode):
"""Mathematical operations on values."""
bl_idname = 'LNMathNode'
bl_label = 'Math'
arm_version = 3
@staticmethod
def METHOD_NAME(obj, prop_name, value):
return obj.bl_rna.properties[prop_name].enum_items[value].identifier
@staticmethod
def get_count_in(operation_name):
return {
'Add': 0,
'Subtract': 0,
'Multiply': 0,
'Divide': 0,
'Sine': 1,
'Cosine': 1,
'Abs': 1,
'Tangent': 1,
'Arcsine': 1,
'Arccosine': 1,
'Arctangent': 1,
'Logarithm': 1,
'Round': 2,
'Floor': 1,
'Ceil': 1,
'Square Root': 1,
'Fract': 1,
'Exponent': 1,
'Max': 2,
'Min': 2,
'Power': 2,
'Arctan2': 2,
'Modulo': 2,
'Less Than': 2,
'Greater Than': 2,
'Ping-Pong': 2
}.get(operation_name, 0)
def get_enum(self):
return self.get('property0', 0)
def set_enum(self, value):
        # Check whether a different operation was selected
select_current = self.METHOD_NAME(self, 'property0', value)
select_prev = self.property0
if select_prev != select_current:
            # Variadic operations (two or more inputs): Add, Subtract, Multiply, Divide
if (self.get_count_in(select_current) == 0):
while (len(self.inputs) < 2):
self.add_input('ArmFloatSocket', 'Value ' + str(len(self.inputs)))
# 2 arguments: Max, Min, Power, Arctan2, Modulo, Less Than, Greater Than, Ping-Pong
if (self.get_count_in(select_current) == 2):
while (len(self.inputs) > 2):
self.inputs.remove(self.inputs.values()[-1])
while (len(self.inputs) < 2):
self.add_input('ArmFloatSocket', 'Value ' + str(len(self.inputs)))
            # 1 argument: Sine, Cosine, Abs, Tangent, Arcsine, Arccosine, Arctangent, Logarithm, Floor, Ceil, Square Root, Fract, Exponent
if (self.get_count_in(select_current) == 1):
while (len(self.inputs) > 1):
self.inputs.remove(self.inputs.values()[-1])
self['property0'] = value
if (self.property0 == 'Round'):
self.inputs[1].name = 'Precision'
elif (self.property0 == 'Ping-Pong'):
self.inputs[1].name = 'Scale'
elif (len(self.inputs) > 1): self.inputs[1].name = 'Value 1'
property0: HaxeEnumProperty(
'property0',
items = [('Add', 'Add', 'Add'),
('Multiply', 'Multiply', 'Multiply'),
('Sine', 'Sine', 'Sine'),
('Cosine', 'Cosine', 'Cosine'),
('Max', 'Maximum', 'Max'),
('Min', 'Minimum', 'Min'),
('Abs', 'Absolute', 'Abs'),
('Subtract', 'Subtract', 'Subtract'),
('Divide', 'Divide', 'Divide'),
('Tangent', 'Tangent', 'Tangent'),
('Arcsine', 'Arcsine', 'Arcsine'),
('Arccosine', 'Arccosine', 'Arccosine'),
('Arctangent', 'Arctangent', 'Arctangent'),
('Power', 'Power', 'Power'),
('Logarithm', 'Logarithm', 'Logarithm'),
('Round', 'Round', 'Round (Value 1 precision of decimal places)'),
('Less Than', 'Less Than', 'Less Than'),
('Greater Than', 'Greater Than', 'Greater Than'),
('Modulo', 'Modulo', 'Modulo'),
('Arctan2', 'Arctan2', 'Arctan2'),
('Floor', 'Floor', 'Floor'),
('Ceil', 'Ceil', 'Ceil'),
('Fract', 'Fract', 'Fract'),
('Square Root', 'Square Root', 'Square Root'),
('Exponent', 'Exponent', 'Exponent'),
('Ping-Pong', 'Ping-Pong', 'The output value is moved between 0.0 and the Scale based on the input value')],
name='', default='Add', set=set_enum, get=get_enum)
property1: HaxeBoolProperty('property1', name='Clamp', default=False)
def __init__(self):
array_nodes[str(id(self))] = self
def arm_init(self, context):
self.add_input('ArmFloatSocket', 'Value 0', default_value=0.0)
self.add_input('ArmFloatSocket', 'Value 1', default_value=0.0)
self.add_output('ArmFloatSocket', 'Result')
def draw_buttons(self, context, layout):
layout.prop(self, 'property1')
layout.prop(self, 'property0')
# Many arguments: Add, Subtract, Multiply, Divide
if (self.get_count_in(self.property0) == 0):
row = layout.row(align=True)
column = row.column(align=True)
op = column.operator('arm.node_add_input', text='Add Value', icon='PLUS', emboss=True)
op.node_index = str(id(self))
op.socket_type = 'ArmFloatSocket'
op.name_format = 'Value {0}'
column = row.column(align=True)
op = column.operator('arm.node_remove_input', text='', icon='X', emboss=True)
op.node_index = str(id(self))
if len(self.inputs) == 2:
column.enabled = False
def draw_label(self) -> str:
return f'{self.bl_label}: {self.property0}'
def get_replacement_node(self, node_tree: bpy.types.NodeTree):
if self.arm_version not in (0, 2):
raise LookupError()
return NodeReplacement.Identity(self) | null |
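# Sanity sketch for the arity table above (pure-Python asserts; assumes the
# module can be imported outside Blender, which may not hold in practice):
if __name__ == "__main__":
    assert MathNode.get_count_in('Add') == 0      # variadic inputs
    assert MathNode.get_count_in('Round') == 2    # value + precision
    assert MathNode.get_count_in('Sine') == 1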
246 | """
@file
@brief This file contains the project file listview, used by the main window
@author Noah Figg <[email protected]>
@author Jonathan Thomas <[email protected]>
@section LICENSE
Copyright (c) 2008-2018 OpenShot Studios, LLC
(http://www.openshotstudios.com). This file is part of
OpenShot Video Editor (http://www.openshot.org), an open-source project
dedicated to delivering high quality video editing and animation solutions
to the world.
OpenShot Video Editor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenShot Video Editor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenShot Video Editor. If not, see <http://www.gnu.org/licenses/>.
"""
from PyQt5.QtCore import QSize, Qt, QPoint, QRegExp
from PyQt5.QtGui import QDrag, QCursor
from PyQt5.QtWidgets import QListView, QAbstractItemView, QMenu
from classes import info
from classes.app import get_app
from classes.logger import log
from classes.query import File
class FilesListView(QListView):
""" A ListView QWidget used on the main window """
drag_item_size = QSize(48, 48)
drag_item_center = QPoint(24, 24)
def contextMenuEvent(self, event):
event.accept()
# Set context menu mode
app = get_app()
app.context_menu_object = "files"
index = self.indexAt(event.pos())
# Build menu
menu = QMenu(self)
menu.addAction(self.win.actionImportFiles)
menu.addAction(self.win.actionDetailsView)
if index.isValid():
# Look up the model item and our unique ID
model = self.model()
            # Look up file_id from column index 5 of this row
id_index = index.sibling(index.row(), 5)
file_id = model.data(id_index, Qt.DisplayRole)
# If a valid file selected, show file related options
menu.addSeparator()
# Add edit title option (if svg file)
file = File.get(id=file_id)
if file and file.data.get("path").endswith(".svg"):
menu.addAction(self.win.actionEditTitle)
menu.addAction(self.win.actionDuplicateTitle)
menu.addSeparator()
menu.addAction(self.win.actionPreview_File)
menu.addSeparator()
menu.addAction(self.win.actionSplitClip)
menu.addAction(self.win.actionExportClips)
menu.addSeparator()
menu.addAction(self.win.actionAdd_to_Timeline)
menu.addAction(self.win.actionFile_Properties)
menu.addSeparator()
menu.addAction(self.win.actionRemove_from_Project)
menu.addSeparator()
# Show menu
menu.popup(event.globalPos())
def dragEnterEvent(self, event):
# If dragging urls onto widget, accept
if not event.mimeData().hasUrls():
event.ignore()
return
event.accept()
event.setDropAction(Qt.CopyAction)
def startDrag(self, supportedActions):
""" Override startDrag method to display custom icon """
# Get first column indexes for all selected rows
selected = self.selectionModel().selectedRows(0)
# Get image of current item
current = self.selectionModel().currentIndex()
if not current.isValid() and selected:
current = selected[0]
if not current.isValid():
log.warning("No draggable items found in model!")
return False
# Get icon from column 0 on same row as current item
icon = current.sibling(current.row(), 0).data(Qt.DecorationRole)
# Start drag operation
drag = QDrag(self)
drag.setMimeData(self.model().mimeData(selected))
drag.setPixmap(icon.pixmap(self.drag_item_size))
drag.setHotSpot(self.drag_item_center)
drag.exec_()
    # Without defining this method, the 'copy' drop action doesn't show with the cursor
def dragMoveEvent(self, event):
event.accept()
# Handle a drag and drop being dropped on widget
def dropEvent(self, event):
if not event.mimeData().hasUrls():
# Nothing we're interested in
            event.ignore()
return
event.accept()
# Use try/finally so we always reset the cursor
try:
# Set cursor to waiting
get_app().setOverrideCursor(QCursor(Qt.WaitCursor))
qurl_list = event.mimeData().urls()
log.info("Processing drop event for {} urls".format(len(qurl_list)))
self.files_model.process_urls(qurl_list)
finally:
# Restore cursor
get_app().restoreOverrideCursor()
# Pass file add requests to the model
def add_file(self, filepath):
self.files_model.add_files(filepath)
def METHOD_NAME(self):
self.refresh_view()
def refresh_view(self):
"""Filter files with proxy class"""
model = self.model()
filter_text = self.win.filesFilter.text()
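        # Treat spaces in the filter text as wildcards: e.g. "my clip" becomes
        # the pattern "my.*clip", matching the terms in order anywhere in the text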
model.setFilterRegExp(QRegExp(filter_text.replace(' ', '.*'), Qt.CaseInsensitive))
col = model.sortColumn()
model.sort(col)
def resize_contents(self):
pass
def __init__(self, model, *args):
# Invoke parent init
super().__init__(*args)
# Get a reference to the window object
self.win = get_app().window
# Get Model data
self.files_model = model
self.setModel(self.files_model.proxy_model)
# Remove the default selection model and wire up to the shared one
self.selectionModel().deleteLater()
self.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.setSelectionBehavior(QAbstractItemView.SelectRows)
self.setSelectionModel(self.files_model.selection_model)
        # Setup drag and drop handling
self.setAcceptDrops(True)
self.setDragEnabled(True)
self.setDropIndicatorShown(True)
# Setup header columns and layout
self.setIconSize(info.LIST_ICON_SIZE)
self.setGridSize(info.LIST_GRID_SIZE)
self.setViewMode(QListView.IconMode)
self.setResizeMode(QListView.Adjust)
self.setUniformItemSizes(True)
self.setStyleSheet('QListView::item { padding-top: 2px; }')
self.setWordWrap(False)
self.setTextElideMode(Qt.ElideRight)
self.files_model.ModelRefreshed.connect(self.refresh_view)
# setup filter events
app = get_app()
app.window.filesFilter.textChanged.connect(self.METHOD_NAME) | null |
247 | """Misc. useful functions that can be used at many places in the program."""
import os
import subprocess as sp
import warnings
import proglog
OS_NAME = os.name
def cross_platform_popen_params(popen_params):
"""Wrap with this function a dictionary of ``subprocess.Popen`` kwargs and
will be ready to work without unexpected behaviours in any platform.
Currently, the implementation will add to them:
- ``creationflags=0x08000000``: no extra unwanted window opens on Windows
when the child process is created. Only added on Windows.
"""
if OS_NAME == "nt":
popen_params["creationflags"] = 0x08000000
return popen_params
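# A minimal usage sketch (the ffmpeg invocation is hypothetical):
#   params = cross_platform_popen_params({"stdout": sp.DEVNULL, "stderr": sp.PIPE})
#   proc = sp.Popen(["ffmpeg", "-version"], **params)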
def subprocess_call(cmd, logger="bar"):
"""Executes the given subprocess command.
    Set ``logger`` to ``None`` to silence output, or pass a custom Proglog logger.
"""
logger = proglog.default_bar_logger(logger)
logger(message="MoviePy - Running:\n>>> " + " ".join(cmd))
popen_params = cross_platform_popen_params(
{"stdout": sp.DEVNULL, "stderr": sp.PIPE, "stdin": sp.DEVNULL}
)
proc = sp.Popen(cmd, **popen_params)
out, err = proc.communicate() # proc.wait()
proc.stderr.close()
if proc.returncode:
logger(message="MoviePy - Command returned an error")
raise IOError(err.decode("utf8"))
else:
logger(message="MoviePy - Command successful")
del proc
def convert_to_seconds(time):
"""Will convert any time into seconds.
If the type of `time` is not valid,
it's returned as is.
Here are the accepted formats:
>>> convert_to_seconds(15.4) # seconds
15.4
>>> convert_to_seconds((1, 21.5)) # (min,sec)
81.5
>>> convert_to_seconds((1, 1, 2)) # (hr, min, sec)
3662
>>> convert_to_seconds('01:01:33.045')
3693.045
    >>> convert_to_seconds('01:01:33,5') # comma works too
3693.5
>>> convert_to_seconds('1:33,5') # only minutes and secs
99.5
>>> convert_to_seconds('33.5') # only secs
33.5
"""
factors = (1, 60, 3600)
if isinstance(time, str):
time = [float(part.replace(",", ".")) for part in time.split(":")]
if not isinstance(time, (tuple, list)):
return time
return sum(mult * part for mult, part in zip(factors, reversed(time)))
def deprecated_version_of(func, old_name):
"""Indicates that a function is deprecated and has a new name.
`func` is the new function and `old_name` is the name of the deprecated
function.
Returns
-------
deprecated_func
A function that does the same thing as `func`, but with a docstring
and a printed message on call which say that the function is
deprecated and that you should use `func` instead.
Examples
--------
>>> # The badly named method 'to_file' is replaced by 'write_file'
>>> class Clip:
    >>>     def write_file(self, *args):
>>> # blablabla
>>>
>>> Clip.to_file = deprecated_version_of(Clip.write_file, 'to_file')
"""
# Detect new name of func
new_name = func.__name__
warning = (
"The function ``%s`` is deprecated and is kept temporarily "
"for backwards compatibility.\nPlease use the new name, "
"``%s``, instead."
) % (old_name, new_name)
def deprecated_func(*args, **kwargs):
warnings.warn("MoviePy: " + warning, PendingDeprecationWarning)
return func(*args, **kwargs)
deprecated_func.__doc__ = warning
return deprecated_func
# Non-exhaustive dictionary to store default information.
# Any addition is most welcome.
# Note that 'gif' is complicated to place. From a VideoFileClip point of view,
# it is a video, but from an HTML5 point of view, it is an image.
extensions_dict = {
"mp4": {"type": "video", "codec": ["libx264", "libmpeg4", "aac"]},
"mkv": {"type": "video", "codec": ["libx264", "libmpeg4", "aac"]},
"ogv": {"type": "video", "codec": ["libtheora"]},
"webm": {"type": "video", "codec": ["libvpx"]},
"avi": {"type": "video"},
"mov": {"type": "video"},
"ogg": {"type": "audio", "codec": ["libvorbis"]},
"mp3": {"type": "audio", "codec": ["libmp3lame"]},
"wav": {"type": "audio", "codec": ["pcm_s16le", "pcm_s24le", "pcm_s32le"]},
"m4a": {"type": "audio", "codec": ["libfdk_aac"]},
}
for ext in ["jpg", "jpeg", "png", "bmp", "tiff"]:
extensions_dict[ext] = {"type": "image"}
def METHOD_NAME(codec):
"""Returns the correspondent file extension for a codec.
Parameters
----------
codec : str
Video or audio codec name.
"""
if codec in extensions_dict:
# codec is already the extension
return codec
for ext, infos in extensions_dict.items():
if codec in infos.get("codec", []):
return ext
raise ValueError(
"The audio_codec you chose is unknown by MoviePy. "
"You should report this. In the meantime, you can "
"specify a temp_audiofile with the right extension "
"in write_videofile."
) | null |
248 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkadb.endpoint import endpoint_data
class DescribeDiagnosisRecordsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'adb', '2019-03-15', 'DescribeDiagnosisRecords','ads')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_QueryCondition(self): # String
return self.get_query_params().get('QueryCondition')
def set_QueryCondition(self, QueryCondition): # String
self.add_query_param('QueryCondition', QueryCondition)
def get_StartTime(self): # String
return self.get_query_params().get('StartTime')
def set_StartTime(self, StartTime): # String
self.add_query_param('StartTime', StartTime)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_Database(self): # String
return self.get_query_params().get('Database')
def set_Database(self, Database): # String
self.add_query_param('Database', Database)
def get_ClientIp(self): # String
return self.get_query_params().get('ClientIp')
def set_ClientIp(self, ClientIp): # String
self.add_query_param('ClientIp', ClientIp)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_Keyword(self): # String
return self.get_query_params().get('Keyword')
def set_Keyword(self, Keyword): # String
self.add_query_param('Keyword', Keyword)
def get_Lang(self): # String
return self.get_query_params().get('Lang')
def set_Lang(self, Lang): # String
self.add_query_param('Lang', Lang)
def get_Order(self): # String
return self.get_query_params().get('Order')
def set_Order(self, Order): # String
self.add_query_param('Order', Order)
def get_MaxScanSize(self): # Long
return self.get_query_params().get('MaxScanSize')
def set_MaxScanSize(self, MaxScanSize): # Long
self.add_query_param('MaxScanSize', MaxScanSize)
def get_ResourceGroup(self): # String
return self.get_query_params().get('ResourceGroup')
def set_ResourceGroup(self, ResourceGroup): # String
self.add_query_param('ResourceGroup', ResourceGroup)
def METHOD_NAME(self): # String
return self.get_query_params().get('DBClusterId')
def set_DBClusterId(self, DBClusterId): # String
self.add_query_param('DBClusterId', DBClusterId)
def get_PatternId(self): # String
return self.get_query_params().get('PatternId')
def set_PatternId(self, PatternId): # String
self.add_query_param('PatternId', PatternId)
def get_EndTime(self): # String
return self.get_query_params().get('EndTime')
def set_EndTime(self, EndTime): # String
self.add_query_param('EndTime', EndTime)
def get_MinPeakMemory(self): # Long
return self.get_query_params().get('MinPeakMemory')
def set_MinPeakMemory(self, MinPeakMemory): # Long
self.add_query_param('MinPeakMemory', MinPeakMemory)
def get_MinScanSize(self): # Long
return self.get_query_params().get('MinScanSize')
def set_MinScanSize(self, MinScanSize): # Long
self.add_query_param('MinScanSize', MinScanSize)
def get_MaxPeakMemory(self): # Long
return self.get_query_params().get('MaxPeakMemory')
def set_MaxPeakMemory(self, MaxPeakMemory): # Long
self.add_query_param('MaxPeakMemory', MaxPeakMemory)
def get_UserName(self): # String
return self.get_query_params().get('UserName')
def set_UserName(self, UserName): # String
self.add_query_param('UserName', UserName) | null |
249 | from typing import Any, Callable, Dict, Optional
from urllib.parse import urlparse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.urls import reverse
from selenium.webdriver.common.by import By
from selenium.webdriver.firefox.webdriver import WebDriver
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.support import expected_conditions as exp_cond
from selenium.webdriver.support.wait import WebDriverWait
from nextcloudappstore.core.tests.e2e import (
SELENIUM_WAIT_SEC,
TEST_EMAIL,
TEST_PASSWORD,
TEST_USER,
)
from nextcloudappstore.user.facades import create_user, delete_user
class BaseStoreTest(StaticLiveServerTestCase):
def by_id(self, id):
return self.selenium.find_element(By.ID, id)
def METHOD_NAME(self, selector: str, multiple: bool = False):
if multiple:
return self.selenium.find_elements(By.CSS_SELECTOR, selector)
else:
return self.selenium.find_element(By.CSS_SELECTOR, selector)
def setUp(self):
self.selenium = WebDriver()
self.selenium.implicitly_wait(SELENIUM_WAIT_SEC)
user = create_user(TEST_USER, TEST_PASSWORD, TEST_EMAIL)
user.firstname = "live"
user.lastname = "test"
user.save()
def tearDown(self):
try:
delete_user(TEST_USER)
except Exception:
pass
super().tearDown()
self.selenium.quit()
    def go_to(self, url_name: str, kwargs: Optional[Dict[str, str]] = None) -> None:
app_url = reverse(url_name, kwargs=kwargs)
self.selenium.get("%s%s" % (self.live_server_url, app_url))
def go_to_app(self, app_id):
self.go_to("app-detail", {"id": app_id})
def go_to_app_register(self):
self.go_to("app-register")
def go_to_app_upload(self):
self.go_to("app-upload")
def go_to_login(self):
self.selenium.get("%s%s" % (self.live_server_url, "/login/"))
def login(self, user: str = TEST_USER, password: str = TEST_PASSWORD):
self.go_to_login()
user_input = self.selenium.find_element(By.NAME, "login")
user_input.send_keys(user)
pass_input = self.selenium.find_element(By.NAME, "password")
pass_input.send_keys(password)
self.selenium.find_element(By.XPATH, '//button[@type="submit"]').click()
def assert_can_not_login(self):
self.go_to("home")
self.go_to_login()
self.by_id("id_login").clear()
self.by_id("id_login").send_keys("livetest")
self.by_id("id_password").clear()
self.by_id("id_password").send_keys("livetest")
self.METHOD_NAME('.auth-form button[type="submit"]').click()
error = self.METHOD_NAME(".auth-form .text-danger")
self.assertTrue(error.is_displayed())
self.assertOnPage("account_login")
def logout(self):
self.findNavigationLink("account_logout").click()
def wait_for(self, selector: str, then: Callable[[WebElement], None]) -> Any:
element = WebDriverWait(self.selenium, SELENIUM_WAIT_SEC).until(
exp_cond.visibility_of_element_located((By.CSS_SELECTOR, selector))
)
return then(element)
def wait_for_url(self, url: str, timeout: Optional[int] = None) -> Any:
if timeout is None:
timeout = SELENIUM_WAIT_SEC
WebDriverWait(self.selenium, timeout).until(exp_cond.url_contains(url))
def wait_for_url_match(self, url: str, timeout: Optional[int] = None) -> Any:
if timeout is None:
timeout = SELENIUM_WAIT_SEC
WebDriverWait(self.selenium, timeout).until(exp_cond.url_matches(url))
def wait_for_url_to_be(self, url: str, timeout: Optional[int] = None) -> Any:
if timeout is None:
timeout = SELENIUM_WAIT_SEC
WebDriverWait(self.selenium, timeout).until(self._url_to_be(url))
    def assertOnPage(self, url_name: str, kwargs: Optional[Dict[str, str]] = None) -> None:
parsed = urlparse(self.selenium.current_url)
url = reverse(url_name, kwargs=kwargs)
self.assertEqual(url, parsed.path)
    def findNavigationLink(self, url_name: str, kwargs: Optional[Dict[str, str]] = None):
route = reverse(url_name, kwargs=kwargs)
return self.METHOD_NAME('#navbar a[href="%s"]' % route)
@staticmethod
def _url_to_be(url: str) -> Callable[[Any], bool]:
def _predicate(driver):
return url.removesuffix("/") == str(driver.current_url).removesuffix("/")
return _predicate | null |
250 | import asyncio
from typing import Any, Dict, List, Optional
from hummingbot.connector.exchange.injective_v2.injective_query_executor import BaseInjectiveQueryExecutor
class ProgrammableQueryExecutor(BaseInjectiveQueryExecutor):
def __init__(self):
self._ping_responses = asyncio.Queue()
self._spot_markets_responses = asyncio.Queue()
self._derivative_market_responses = asyncio.Queue()
self._derivative_markets_responses = asyncio.Queue()
self._spot_order_book_responses = asyncio.Queue()
self._derivative_order_book_responses = asyncio.Queue()
self._transaction_by_hash_responses = asyncio.Queue()
self._account_portfolio_responses = asyncio.Queue()
self._simulate_transaction_responses = asyncio.Queue()
self._send_transaction_responses = asyncio.Queue()
self._spot_trades_responses = asyncio.Queue()
self._derivative_trades_responses = asyncio.Queue()
self._historical_spot_orders_responses = asyncio.Queue()
self._historical_derivative_orders_responses = asyncio.Queue()
self._transaction_block_height_responses = asyncio.Queue()
self._funding_rates_responses = asyncio.Queue()
self._oracle_prices_responses = asyncio.Queue()
self._funding_payments_responses = asyncio.Queue()
self._derivative_positions_responses = asyncio.Queue()
self._spot_order_book_updates = asyncio.Queue()
self._public_spot_trade_updates = asyncio.Queue()
self._derivative_order_book_updates = asyncio.Queue()
self._public_derivative_trade_updates = asyncio.Queue()
self._oracle_prices_updates = asyncio.Queue()
self._subaccount_positions_events = asyncio.Queue()
self._subaccount_balance_events = asyncio.Queue()
self._historical_spot_order_events = asyncio.Queue()
self._historical_derivative_order_events = asyncio.Queue()
self._transaction_events = asyncio.Queue()
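        # Usage sketch in a test (the attribute names above are real; the flow
        # is illustrative):
        #   executor = ProgrammableQueryExecutor()
        #   executor._ping_responses.put_nowait({})
        #   assert await executor.ping() == {}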
async def ping(self):
response = await self._ping_responses.get()
return response
async def spot_markets(self, status: str) -> Dict[str, Any]:
response = await self._spot_markets_responses.get()
return response
async def derivative_markets(self, status: str) -> Dict[str, Any]:
response = await self._derivative_markets_responses.get()
return response
async def derivative_market(self, market_id: str) -> Dict[str, Any]:
response = await self._derivative_market_responses.get()
return response
async def get_spot_orderbook(self, market_id: str) -> Dict[str, Any]:
response = await self._spot_order_book_responses.get()
return response
async def get_derivative_orderbook(self, market_id: str) -> Dict[str, Any]:
response = await self._derivative_order_book_responses.get()
return response
async def get_tx_by_hash(self, tx_hash: str) -> Dict[str, Any]:
response = await self._transaction_by_hash_responses.get()
return response
async def get_tx_block_height(self, tx_hash: str) -> int:
response = await self._transaction_block_height_responses.get()
return response
async def account_portfolio(self, account_address: str) -> Dict[str, Any]:
response = await self._account_portfolio_responses.get()
return response
async def simulate_tx(self, tx_byte: bytes) -> Dict[str, Any]:
response = await self._simulate_transaction_responses.get()
return response
async def send_tx_sync_mode(self, tx_byte: bytes) -> Dict[str, Any]:
response = await self._send_transaction_responses.get()
return response
async def METHOD_NAME(
self,
market_ids: List[str],
subaccount_id: Optional[str] = None,
start_time: Optional[int] = None,
skip: Optional[int] = None,
limit: Optional[int] = None,
) -> Dict[str, Any]:
response = await self._spot_trades_responses.get()
return response
async def get_derivative_trades(
self,
market_ids: List[str],
subaccount_id: Optional[str] = None,
start_time: Optional[int] = None,
skip: Optional[int] = None,
limit: Optional[int] = None,
) -> Dict[str, Any]:
response = await self._derivative_trades_responses.get()
return response
async def get_historical_spot_orders(
self,
market_ids: List[str],
subaccount_id: str,
start_time: int,
skip: int,
) -> Dict[str, Any]:
response = await self._historical_spot_orders_responses.get()
return response
async def get_historical_derivative_orders(
self,
market_ids: List[str],
subaccount_id: str,
start_time: int,
skip: int,
) -> Dict[str, Any]:
response = await self._historical_derivative_orders_responses.get()
return response
async def get_funding_rates(self, market_id: str, limit: int) -> Dict[str, Any]:
response = await self._funding_rates_responses.get()
return response
async def get_funding_payments(self, subaccount_id: str, market_id: str, limit: int) -> Dict[str, Any]:
response = await self._funding_payments_responses.get()
return response
async def get_derivative_positions(self, subaccount_id: str, skip: int) -> Dict[str, Any]:
response = await self._derivative_positions_responses.get()
return response
async def get_oracle_prices(
self,
base_symbol: str,
quote_symbol: str,
oracle_type: str,
oracle_scale_factor: int,
) -> Dict[str, Any]:
response = await self._oracle_prices_responses.get()
return response
async def spot_order_book_updates_stream(self, market_ids: List[str]):
while True:
next_ob_update = await self._spot_order_book_updates.get()
yield next_ob_update
async def public_spot_trades_stream(self, market_ids: List[str]):
while True:
next_trade = await self._public_spot_trade_updates.get()
yield next_trade
async def derivative_order_book_updates_stream(self, market_ids: List[str]):
while True:
next_ob_update = await self._derivative_order_book_updates.get()
yield next_ob_update
async def public_derivative_trades_stream(self, market_ids: List[str]):
while True:
next_trade = await self._public_derivative_trade_updates.get()
yield next_trade
async def oracle_prices_stream(self, oracle_base: str, oracle_quote: str, oracle_type: str):
while True:
next_update = await self._oracle_prices_updates.get()
yield next_update
async def subaccount_positions_stream(self, subaccount_id: str):
while True:
next_event = await self._subaccount_positions_events.get()
yield next_event
async def subaccount_balance_stream(self, subaccount_id: str):
while True:
next_event = await self._subaccount_balance_events.get()
yield next_event
async def subaccount_historical_spot_orders_stream(
self, market_id: str, subaccount_id: str
):
while True:
next_event = await self._historical_spot_order_events.get()
yield next_event
async def subaccount_historical_derivative_orders_stream(
self, market_id: str, subaccount_id: str
):
while True:
next_event = await self._historical_derivative_order_events.get()
yield next_event
    async def transactions_stream(self):
while True:
next_event = await self._transaction_events.get()
yield next_event | null |
251 | import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
NodeFactory,
ProjectFactory,
RegistrationFactory,
AuthUserFactory,
PrivateLinkFactory,
)
@pytest.fixture()
def user():
return AuthUserFactory()
@pytest.fixture()
def registration_with_children(user):
project = ProjectFactory(creator=user)
NodeFactory(parent=project, creator=user)
NodeFactory(parent=project, creator=user)
NodeFactory(parent=project, creator=user)
NodeFactory(parent=project, creator=user)
return RegistrationFactory(
project=project
)
@pytest.fixture()
def METHOD_NAME(registration_with_children):
return '/{}registrations/{}/children/'.format(
API_BASE,
registration_with_children._id,
)
@pytest.fixture()
def view_only_link(registration_with_children):
view_only_link = PrivateLinkFactory(name='testlink')
view_only_link.nodes.add(registration_with_children)
view_only_link.save()
return view_only_link
@pytest.fixture()
def registration_with_children_approved(user, registration_with_children):
registration_with_children._initiate_approval(user)
approval_token = registration_with_children.registration_approval.approval_state[user._id]['approval_token']
registration_with_children.registration_approval.approve(user, approval_token)
return registration_with_children
@pytest.fixture()
def registration_with_children_approved_url(registration_with_children_approved):
return '/{}registrations/{}/children/'.format(
API_BASE,
registration_with_children_approved._id,
)
@pytest.mark.django_db
class TestRegistrationsChildrenList:
def test_registrations_children_list(self, user, app, registration_with_children, METHOD_NAME):
component_one, component_two, component_three, component_four = registration_with_children.nodes
res = app.get(METHOD_NAME, auth=user.auth)
ids = [node['id'] for node in res.json['data']]
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert component_one._id in ids
assert component_two._id in ids
def test_return_registrations_list_no_auth_approved(self, user, app, registration_with_children_approved, registration_with_children_approved_url):
component_one, component_two, component_three, component_four = registration_with_children_approved.nodes
res = app.get(registration_with_children_approved_url)
ids = [node['id'] for node in res.json['data']]
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert component_one._id in ids
assert component_two._id in ids
def test_registrations_list_no_auth_unapproved(self, user, app, registration_with_children, METHOD_NAME):
res = app.get(METHOD_NAME, expect_errors=True)
assert res.status_code == 401
assert res.content_type == 'application/vnd.api+json'
def test_registration_children_no_auth_vol(self, user, app, registration_with_children,
METHOD_NAME, view_only_link):
# viewed through private link
component_one, component_two, component_three, component_four = registration_with_children.nodes
# get registration related_counts with vol before vol is attached to components
node_url = '/{}registrations/{}/?related_counts=children&view_only={}'.format(API_BASE,
registration_with_children._id, view_only_link.key)
res = app.get(node_url)
assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 0
# view only link is not attached to components
view_only_link_url = '{}?view_only={}'.format(METHOD_NAME, view_only_link.key)
res = app.get(view_only_link_url)
ids = [node['id'] for node in res.json['data']]
assert res.status_code == 200
assert len(ids) == 0
assert component_one._id not in ids
assert component_two._id not in ids
# view only link now attached to components
view_only_link.nodes.add(component_one)
view_only_link.nodes.add(component_two)
view_only_link.nodes.add(component_three)
view_only_link.nodes.add(component_four)
res = app.get(view_only_link_url)
ids = [node['id'] for node in res.json['data']]
assert res.status_code == 200
assert component_one._id in ids
assert component_two._id in ids
# get registration related_counts with vol once vol is attached to components
res = app.get(node_url)
assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 4
# make private vol anonymous
view_only_link.anonymous = True
view_only_link.save()
res = app.get(view_only_link_url)
assert 'contributors' not in res.json['data'][0]['relationships']
child_ids = [item['id'] for item in res.json['data']]
assert component_one._id in child_ids
assert component_two._id in child_ids
assert component_three._id in child_ids
assert component_four._id in child_ids
# delete vol
view_only_link.is_deleted = True
view_only_link.save()
res = app.get(view_only_link_url, expect_errors=True)
assert res.status_code == 401
@pytest.mark.django_db
class TestRegistrationChildrenListFiltering:
def test_registration_child_filtering(self, app, user, registration_with_children):
component_one, component_two, component_three, component_four = registration_with_children.nodes
url = '/{}registrations/{}/children/?filter[title]={}'.format(
API_BASE,
registration_with_children._id,
component_one.title
)
res = app.get(url, auth=user.auth)
ids = [node['id'] for node in res.json['data']]
assert component_one._id in ids
assert component_two._id not in ids | null |
252 | import platform
import secrets
import signal
from pickle import PickleError
from typing import Any, Mapping
from unittest.mock import AsyncMock, MagicMock
import pytest
from aiodocker.exceptions import DockerError
from ai.backend.agent.docker.agent import DockerAgent
from ai.backend.common.docker import ImageRef
from ai.backend.common.exception import ImageNotAvailable
from ai.backend.common.types import AutoPullBehavior
class DummyEtcd:
async def METHOD_NAME(self, key: str) -> Mapping[str, Any]:
return {}
@pytest.fixture
async def agent(local_config, test_id, mocker):
dummy_etcd = DummyEtcd()
mocked_etcd_get_prefix = AsyncMock(return_value={})
mocker.patch.object(dummy_etcd, "get_prefix", new=mocked_etcd_get_prefix)
test_case_id = secrets.token_hex(8)
agent = await DockerAgent.new(
dummy_etcd,
local_config,
stats_monitor=None,
error_monitor=None,
skip_initial_scan=True,
) # for faster test iteration
agent.local_instance_id = test_case_id # use per-test private registry file
try:
yield agent
finally:
await agent.shutdown(signal.SIGTERM)
@pytest.mark.asyncio
async def test_init(agent, mocker):
print(agent)
ret = platform.machine().lower()
aliases = {
"arm64": "aarch64", # macOS with LLVM
"amd64": "x86_64", # Windows/Linux
"x64": "x86_64", # Windows
"x32": "x86", # Windows
"i686": "x86", # Windows
}
arch = aliases.get(ret, ret)
imgref = ImageRef("index.docker.io/lablup/lua:5.3-alpine3.8", architecture=arch)
query_digest = "sha256:b000000000000000000000000000000000000000000000000000000000000001"
digest_matching_image_info = {
"Id": "sha256:b000000000000000000000000000000000000000000000000000000000000001",
"RepoTags": [
"lablup/lua:5.3-alpine3.8",
],
}
digest_mismatching_image_info = {
"Id": "sha256:a000000000000000000000000000000000000000000000000000000000000002",
"RepoTags": [
"lablup/lua:5.3-alpine3.8",
],
}
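# Expected check_image() pull decisions exercised by the tests below
# (AutoPullBehavior vs. local image state):
#   DIGEST: pull when the local digest mismatches or the image is missing
#   TAG:    pull only when the image is missing locally
#   NONE:   never pull; raise ImageNotAvailable when the image is missing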
@pytest.mark.asyncio
async def test_auto_pull_digest_when_digest_matching(agent, mocker):
behavior = AutoPullBehavior.DIGEST
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(return_value=digest_matching_image_info)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_digest_when_digest_mismatching(agent, mocker):
behavior = AutoPullBehavior.DIGEST
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(return_value=digest_mismatching_image_info)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_digest_when_missing(agent, mocker):
behavior = AutoPullBehavior.DIGEST
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(
side_effect=DockerError(
status=404,
data={"message": "Simulated missing image"},
),
)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert pull
inspect_mock.assert_called_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_tag_when_digest_matching(agent, mocker):
behavior = AutoPullBehavior.TAG
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(return_value=digest_matching_image_info)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_tag_when_digest_mismatching(agent, mocker):
behavior = AutoPullBehavior.TAG
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(return_value=digest_mismatching_image_info)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_tag_when_missing(agent, mocker):
behavior = AutoPullBehavior.TAG
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(
side_effect=DockerError(
status=404,
data={"message": "Simulated missing image"},
),
)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert pull
inspect_mock.assert_called_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_none_when_digest_matching(agent, mocker):
behavior = AutoPullBehavior.NONE
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(return_value=digest_matching_image_info)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_none_when_digest_mismatching(agent, mocker):
behavior = AutoPullBehavior.NONE
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(return_value=digest_mismatching_image_info)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_none_when_missing(agent, mocker):
behavior = AutoPullBehavior.NONE
docker_mock = MagicMock()
docker_mock.close = AsyncMock()
docker_mock.images = MagicMock()
inspect_mock = AsyncMock(
side_effect=DockerError(
status=404,
data={"message": "Simulated missing image"},
),
)
docker_mock.images.inspect = inspect_mock
mocker.patch("ai.backend.agent.docker.agent.Docker", return_value=docker_mock)
with pytest.raises(ImageNotAvailable) as e:
await agent.check_image(imgref, query_digest, behavior)
assert e.value.args[0] is imgref
inspect_mock.assert_called_with(imgref.canonical)
@pytest.mark.asyncio
async def test_save_last_registry_exception(agent, mocker):
agent.latest_registry_written_time = MagicMock(return_value=0)
mocker.patch("ai.backend.agent.agent.pickle.dump", side_effect=PickleError)
registry_state_path = (
agent.local_config["agent"]["var-base-path"]
/ f"last_registry.{agent.local_instance_id}.dat"
)
await agent.save_last_registry()
assert not registry_state_path.exists() | null |
253 | # Copyright 2019 Camptocamp (http://www.camptocamp.com).
# @author Simone Orsi <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from .common import TestMultiUserCommon
class TestMultiUserCustomer(TestMultiUserCommon):
"""Test interaction with /customer endpoint."""
def test_create_customer_no_multi_user(self):
self.backend.customer_multi_user = False
self.data.update({"external_id": "cust1"})
params = dict(self.data, company_token="ABCDEF")
res = self.service.dispatch("create", params=params)["data"]
partner = self.env["res.partner"].browse(res["id"])
self.assertFalse(partner.parent_id)
self.assertFalse(partner.has_invader_user)
self._test_partner_data(partner, self.data)
def test_create_customer_multi_user(self):
self.backend.customer_multi_user = True
data = dict(self.data, external_id="new1", email="[email protected]", name="New One")
# customer 1
params = dict(data, company_token="ABCDEF")
res = self.service.dispatch("create", params=params)["data"]
partner1 = self.env["res.partner"].browse(res["id"])
self.assertEqual(partner1.parent_id, self.company)
self.assertEqual(partner1.type, "contact")
self.assertTrue(partner1.has_invader_user)
self._test_partner_data(partner1, data)
# customer 2
data = dict(self.data, external_id="new2", email="[email protected]", name="New Two")
params = dict(data, company_token="ABCDEF")
res = self.service.dispatch("create", params=params)["data"]
partner2 = self.env["res.partner"].browse(res["id"])
self.assertEqual(partner2.parent_id, self.company)
self.assertEqual(partner2.type, "contact")
self.assertTrue(partner2.has_invader_user)
self._test_partner_data(partner2, data)
# both are there
self.assertIn(partner1, self.company.child_ids)
self.assertIn(partner2, self.company.child_ids)
# the company is not an invader user
self.assertFalse(self.company.has_invader_user)
def test_update_customer_multi_user_store_cache(self):
self.backend.customer_multi_user = True
data = dict(self.data, external_id="new1", email="[email protected]", name="New One")
# customer 1
params = dict(data, company_token="ABCDEF")
res = self.service.dispatch("create", params=params)["data"]
partner1 = self.env["res.partner"].browse(res["id"])
# Update happens via address service. To be changed as per
# https://github.com/shopinvader/odoo-shopinvader/issues/530
params["name"] = params["name"] + " UPDATED!"
self._update_work_ctx(self.address_service, partner=partner1)
res = self.address_service.dispatch("update", partner1.id, params=params)
# By default the customer partner is the main partner
# hence we are not editing the main profile and we don't need cache
self.assertNotIn("store_cache", res)
# Change the policy
self.backend.multi_user_profile_policy = "record_id"
params["name"] = params["name"] + " UPDATED 2 times!"
res = self.address_service.dispatch("update", partner1.id, params=params)
self.assertTrue(
res["store_cache"]["customer"]["name"].endswith(" UPDATED 2 times!")
)
def test_create_customer_multi_user_wrong_token(self):
self.data.update({"external_id": "cust1"})
params = dict(self.data, company_token="WRONG_TOKEN")
res = self.service.dispatch("create", params=params)["data"]
partner = self.env["res.partner"].browse(res["id"])
# partner is created normally, no relation w/ the company
self.assertFalse(partner.parent_id)
self.assertFalse(partner.has_invader_user)
self._test_partner_data(partner, self.data)
self.assertNotIn(partner, self.company.child_ids)
self.assertFalse(self.company.has_invader_user)
def test_company_data_multi_user_off(self):
self.backend.customer_multi_user = False
service = self._get_service(partner=self.company, usage="customer")
res = service._to_customer_info(self.company)
# multi user not enabled
self.assertNotIn("company_token", res)
self.assertNotIn("main_account", res)
self.assertNotIn("is_simple_user", res)
def test_company_data_multi_user_on(self):
# check on the company
service = self._get_service(partner=self.company, usage="customer")
res = service._to_customer_info(self.company)
self.assertEqual(res["company_token"], "ABCDEF")
self.assertFalse(res["is_simple_user"])
# same user of the company
self.assertEqual(res["main_account"], None)
def METHOD_NAME(self):
# check on a simple user
service = self._get_service(
partner=self.user_binding.record_id, usage="customer"
)
res = service._to_customer_info(self.user_binding.record_id)
self.assertTrue(res["is_simple_user"])
self.assertNotIn("company_token", res)
self.assertEqual(
res["main_account"],
{
"id": self.company.id,
"name": self.company.name,
"ref": self.company.ref,
},
) | null |
254 | from typing import Optional
from AnyQt.QtCore import Qt, QSizeF, QRectF, QPointF
from AnyQt.QtGui import QPixmap, QTransform, QPainter
from AnyQt.QtWidgets import (
QGraphicsWidget, QGraphicsItem, QStyleOptionGraphicsItem, QWidget,
)
from Orange.widgets.utils.graphicslayoutitem import scaled
class GraphicsPixmapWidget(QGraphicsWidget):
def __init__(
self,
parent: Optional[QGraphicsItem] = None,
pixmap: Optional[QPixmap] = None,
scaleContents=False,
aspectMode=Qt.KeepAspectRatio,
**kwargs
) -> None:
self.__scaleContents = scaleContents
self.__aspectMode = aspectMode
self.__pixmap = QPixmap(pixmap) if pixmap is not None else QPixmap()
super().__init__(None, **kwargs)
self.setFlag(QGraphicsWidget.ItemUsesExtendedStyleOption, True)
self.setContentsMargins(0, 0, 0, 0)
if parent is not None:
self.setParentItem(parent)
def setPixmap(self, pixmap: QPixmap) -> None:
self.prepareGeometryChange()
self.__pixmap = QPixmap(pixmap)
self.updateGeometry()
def pixmap(self) -> QPixmap:
return QPixmap(self.__pixmap)
def setAspectRatioMode(self, mode: Qt.AspectRatioMode) -> None:
if self.__aspectMode != mode:
self.__aspectMode = mode
sp = self.sizePolicy()
sp.setHeightForWidth(
self.__aspectMode != Qt.IgnoreAspectRatio and self.__scaleContents
)
self.setSizePolicy(sp)
self.updateGeometry()
def METHOD_NAME(self) -> Qt.AspectRatioMode:
return self.__aspectMode
def setScaleContents(self, scale: bool) -> None:
if self.__scaleContents != scale:
self.__scaleContents = bool(scale)
sp = self.sizePolicy()
sp.setHeightForWidth(
self.__aspectMode != Qt.IgnoreAspectRatio and self.__scaleContents
)
self.setSizePolicy(sp)
self.updateGeometry()
def scaleContents(self) -> bool:
return self.__scaleContents
def sizeHint(self, which, constraint=QSizeF(-1, -1)) -> QSizeF:
if which == Qt.PreferredSize:
sh = QSizeF(self.__pixmap.size())
if self.__scaleContents:
sh = scaled(sh, constraint, self.__aspectMode)
return sh
elif which == Qt.MinimumSize:
if self.__scaleContents:
return QSizeF(0, 0)
else:
return QSizeF(self.__pixmap.size())
elif which == Qt.MaximumSize:
if self.__scaleContents:
return QSizeF()
else:
return QSizeF(self.__pixmap.size())
else:
# Qt.MinimumDescent
return QSizeF()
def pixmapTransform(self) -> QTransform:
if self.__pixmap.isNull():
return QTransform()
pxsize = QSizeF(self.__pixmap.size())
crect = self.contentsRect()
transform = QTransform()
transform = transform.translate(crect.left(), crect.top())
if self.__scaleContents:
csize = scaled(pxsize, crect.size(), self.__aspectMode)
else:
csize = pxsize
xscale = csize.width() / pxsize.width()
yscale = csize.height() / pxsize.height()
return transform.scale(xscale, yscale)
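        # Worked example: a 100x50 pixmap inside an 80x80 contents rect with
        # Qt.KeepAspectRatio scales to 80x40, so xscale = yscale = 0.8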
def paint(
self, painter: QPainter, option: QStyleOptionGraphicsItem,
widget: Optional[QWidget] = None
) -> None:
if self.__pixmap.isNull():
return
pixmap = self.__pixmap
crect = self.contentsRect()
exposed = option.exposedRect
exposedcrect = crect.intersected(exposed)
pixmaptransform = self.pixmapTransform()
# map exposed rect to exposed pixmap coords
assert pixmaptransform.type() in (
QTransform.TxNone, QTransform.TxTranslate, QTransform.TxScale
)
pixmaptransform, ok = pixmaptransform.inverted()
if not ok:
painter.drawPixmap(
crect, pixmap, QRectF(QPointF(0, 0), QSizeF(pixmap.size()))
)
else:
exposedpixmap = pixmaptransform.mapRect(exposed)
painter.drawPixmap(exposedcrect, pixmap, exposedpixmap) | null |
255 | # Copyright (c) ZenML GmbH 2022. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Implementation of label config generators for Label Studio."""
from typing import List, Tuple
from zenml.enums import AnnotationTasks
from zenml.logger import get_logger
logger = get_logger(__name__)
TASK_TO_FILENAME_REFERENCE_MAPPING = {
AnnotationTasks.IMAGE_CLASSIFICATION.value: "image",
AnnotationTasks.OBJECT_DETECTION_BOUNDING_BOXES.value: "image",
AnnotationTasks.OCR.value: "image",
AnnotationTasks.TEXT_CLASSIFICATION.value: "image",
}
def _generate_label_config() -> str:
# TODO [HIGH] Implement label config generator
# refactoring out duplicated code from the various functions below
raise NotImplementedError()
def generate_text_classification_label_config(
labels: List[str],
) -> Tuple[str, str]:
"""Generates a Label Studio label config for text classification.
This is based on the basic config example shown at
https://labelstud.io/templates/sentiment_analysis.html.
Args:
labels: A list of labels to be used in the label config.
Returns:
A tuple of the generated label config and the label config type.
Raises:
ValueError: If no labels are provided.
"""
if not labels:
raise ValueError("No labels provided")
label_config_type = AnnotationTasks.TEXT_CLASSIFICATION
label_config_start = """<View>
<Header value="Choose text class:"/>
<Text name="text" value="$text"/>
<Choices name="class" toName="text" choice="single" showInline="true">
"""
label_config_choices = "".join(
f"<Choice value='{label}' />\n" for label in labels
)
label_config_end = "</Choices>\n</View>"
label_config = label_config_start + label_config_choices + label_config_end
return (
label_config,
label_config_type,
)
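# For labels ["positive", "negative"] the generator above returns, modulo
# whitespace:
#   <View>
#     <Header value="Choose text class:"/>
#     <Text name="text" value="$text"/>
#     <Choices name="class" toName="text" choice="single" showInline="true">
#       <Choice value='positive' />
#       <Choice value='negative' />
#     </Choices>
#   </View>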
def generate_image_classification_label_config(
labels: List[str],
) -> Tuple[str, str]:
"""Generates a Label Studio label config for image classification.
This is based on the basic config example shown at
https://labelstud.io/templates/image_classification.html.
Args:
labels: A list of labels to be used in the label config.
Returns:
A tuple of the generated label config and the label config type.
Raises:
ValueError: If no labels are provided.
"""
if not labels:
raise ValueError("No labels provided")
label_config_type = AnnotationTasks.IMAGE_CLASSIFICATION
label_config_start = """<View>
<Image name="image" value="$image"/>
<Choices name="choice" toName="image">
"""
label_config_choices = "".join(
f"<Choice value='{label}' />\n" for label in labels
)
label_config_end = "</Choices>\n</View>"
label_config = label_config_start + label_config_choices + label_config_end
return (
label_config,
label_config_type,
)
def generate_basic_object_detection_bounding_boxes_label_config(
labels: List[str],
) -> Tuple[str, str]:
"""Generates a Label Studio config for object detection with bounding boxes.
This is based on the basic config example shown at
https://labelstud.io/templates/image_bbox.html.
Args:
labels: A list of labels to be used in the label config.
Returns:
A tuple of the generated label config and the label config type.
Raises:
ValueError: If no labels are provided.
"""
if not labels:
raise ValueError("No labels provided")
label_config_type = AnnotationTasks.OBJECT_DETECTION_BOUNDING_BOXES
label_config_start = """<View>
<Image name="image" value="$image"/>
<RectangleLabels name="label" toName="image">
"""
label_config_choices = "".join(
f"<Label value='{label}' />\n" for label in labels
)
label_config_end = "</RectangleLabels>\n</View>"
label_config = label_config_start + label_config_choices + label_config_end
return (
label_config,
label_config_type,
)
def METHOD_NAME(
labels: List[str],
) -> Tuple[str, str]:
"""Generates a Label Studio config for optical character recognition (OCR) labeling task.
This is based on the basic config example shown at
https://labelstud.io/templates/optical_character_recognition.html
Args:
labels: A list of labels to be used in the label config.
Returns:
A tuple of the generated label config and the label config type.
Raises:
ValueError: If no labels are provided.
"""
if not labels:
raise ValueError("No labels provided")
label_config_type = AnnotationTasks.OCR
label_config_start = """
<View>
<Image name="image" value="$ocr" zoom="true" zoomControl="true" rotateControl="true"/>
<View>
<Filter toName="label" minlength="0" name="filter"/>
<Labels name="label" toName="image">
"""
label_config_choices = "".join(
f"<Label value='{label}' />\n" for label in labels
)
label_config_end = """
</Labels>
</View>
<Rectangle name="bbox" toName="image" strokeWidth="3"/>
<Polygon name="poly" toName="image" strokeWidth="3"/>
<TextArea name="transcription" toName="image" editable="true" perRegion="true" required="true" maxSubmissions="1" rows="5" placeholder="Recognized Text" displayMode="region-list"/>
</View>
"""
label_config = label_config_start + label_config_choices + label_config_end
return (
label_config,
label_config_type,
) | null |
256 | '''
Copyright (C) 2017-2023 Bryant Moscon - [email protected]
Please see the LICENSE file for the terms and conditions
associated with this software.
'''
from collections import defaultdict
import asyncio
import logging
from typing import Optional, ByteString
from aiokafka import AIOKafkaProducer
from aiokafka.errors import RequestTimedOutError, KafkaConnectionError, NodeNotReadyError
from yapic import json
from cryptofeed.backends.backend import BackendBookCallback, BackendCallback, BackendQueue
LOG = logging.getLogger('feedhandler')
class KafkaCallback(BackendQueue):
def __init__(self, key=None, numeric_type=float, none_to=None, **kwargs):
"""
        You can pass configuration options to AIOKafkaProducer as keyword arguments
        (either individual kwargs, an unpacked dictionary ``**config_dict``, or both).
A full list of configuration parameters can be found at
https://aiokafka.readthedocs.io/en/stable/api.html#aiokafka.AIOKafkaProducer
        A 'value_serializer' option allows the use of other schemas such as Avro or Protobuf.
        The default serialization is JSON bytes.
Example:
**{'bootstrap_servers': '127.0.0.1:9092',
'client_id': 'cryptofeed',
'acks': 1,
'value_serializer': your_serialization_function}
(Passing the event loop is already handled)
"""
self.producer_config = kwargs
self.producer = None
self.key: str = key or self.default_key
self.numeric_type = numeric_type
self.none_to = none_to
# Do not allow writer to send messages until connection confirmed
self.running = False
def _default_serializer(self, to_bytes: dict | str) -> ByteString:
if isinstance(to_bytes, dict):
return json.dumpb(to_bytes)
elif isinstance(to_bytes, str):
return to_bytes.encode()
else:
raise TypeError(f'{type(to_bytes)} is not a valid Serialization type')
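        # e.g. a dict like {"price": 1.0} becomes JSON bytes via json.dumpb,
        # and a str key like 'trades' becomes b'trades'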
async def _connect(self):
if not self.producer:
loop = asyncio.get_event_loop()
try:
                config_keys = ', '.join(self.producer_config.keys())
LOG.info(f'{self.__class__.__name__}: Configuring AIOKafka with the following parameters: {config_keys}')
self.producer = AIOKafkaProducer(**self.producer_config, loop=loop)
# Quit if invalid config option passed to AIOKafka
except (TypeError, ValueError) as e:
LOG.error(f'{self.__class__.__name__}: Invalid AIOKafka configuration: {e.args}{chr(10)}See https://aiokafka.readthedocs.io/en/stable/api.html#aiokafka.AIOKafkaProducer for list of configuration options')
raise SystemExit
else:
while not self.running:
try:
await self.producer.start()
except KafkaConnectionError:
LOG.error(f'{self.__class__.__name__}: Unable to bootstrap from host(s)')
await asyncio.sleep(10)
else:
LOG.info(f'{self.__class__.__name__}: "{self.producer.client._client_id}" connected to cluster containing {len(self.producer.client.cluster.brokers())} broker(s)')
self.running = True
def METHOD_NAME(self, data: dict) -> str:
return f"{self.key}-{data['exchange']}-{data['symbol']}"
def partition_key(self, data: dict) -> Optional[bytes]:
return None
def partition(self, data: dict) -> Optional[int]:
return None
async def writer(self):
await self._connect()
while self.running:
async with self.read_queue() as updates:
for index in range(len(updates)):
METHOD_NAME = self.METHOD_NAME(updates[index])
# Check for user-provided serializers, otherwise use default
value = updates[index] if self.producer_config.get('value_serializer') else self._default_serializer(updates[index])
key = self.key if self.producer_config.get('key_serializer') else self._default_serializer(self.key)
partition = self.partition(updates[index])
try:
send_future = await self.producer.send(METHOD_NAME, value, key, partition)
await send_future
except RequestTimedOutError:
LOG.error(f'{self.__class__.__name__}: No response received from server within {self.producer._request_timeout_ms} ms. Messages may not have been delivered')
except NodeNotReadyError:
LOG.error(f'{self.__class__.__name__}: Node not ready')
except Exception as e:
LOG.info(f'{self.__class__.__name__}: Encountered an error:{chr(10)}{e}')
LOG.info(f"{self.__class__.__name__}: sending last messages and closing connection '{self.producer.client._client_id}'")
await self.producer.stop()
class TradeKafka(KafkaCallback, BackendCallback):
default_key = 'trades'
class FundingKafka(KafkaCallback, BackendCallback):
default_key = 'funding'
class BookKafka(KafkaCallback, BackendBookCallback):
default_key = 'book'
def __init__(self, *args, snapshots_only=False, snapshot_interval=1000, **kwargs):
self.snapshots_only = snapshots_only
self.snapshot_interval = snapshot_interval
self.snapshot_count = defaultdict(int)
super().__init__(*args, **kwargs)
class TickerKafka(KafkaCallback, BackendCallback):
default_key = 'ticker'
class OpenInterestKafka(KafkaCallback, BackendCallback):
default_key = 'open_interest'
class LiquidationsKafka(KafkaCallback, BackendCallback):
default_key = 'liquidations'
class CandlesKafka(KafkaCallback, BackendCallback):
default_key = 'candles'
class OrderInfoKafka(KafkaCallback, BackendCallback):
default_key = 'order_info'
class TransactionsKafka(KafkaCallback, BackendCallback):
default_key = 'transactions'
class BalancesKafka(KafkaCallback, BackendCallback):
default_key = 'balances'
class FillsKafka(KafkaCallback, BackendCallback):
default_key = 'fills' | null |
257 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeCommandsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeCommands','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_Type(self): # String
return self.get_query_params().get('Type')
def set_Type(self, Type): # String
self.add_query_param('Type', Type)
def get_CommandId(self): # String
return self.get_query_params().get('CommandId')
def set_CommandId(self, CommandId): # String
self.add_query_param('CommandId', CommandId)
def get_PageNumber(self): # Long
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Long
self.add_query_param('PageNumber', PageNumber)
def get_Provider(self): # String
return self.get_query_params().get('Provider')
def set_Provider(self, Provider): # String
self.add_query_param('Provider', Provider)
def METHOD_NAME(self): # String
return self.get_query_params().get('ContentEncoding')
def set_ContentEncoding(self, ContentEncoding): # String
self.add_query_param('ContentEncoding', ContentEncoding)
def get_PageSize(self): # Long
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Long
self.add_query_param('PageSize', PageSize)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_Latest(self): # Boolean
return self.get_query_params().get('Latest')
def set_Latest(self, Latest): # Boolean
self.add_query_param('Latest', Latest)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name) | null |
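# --- Usage sketch (illustrative, not part of the original file) ---
# How a generated request class like this is typically executed through the
# core SDK client; the credentials and region below are placeholders.
#
# from aliyunsdkcore.client import AcsClient
#
# client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
# request = DescribeCommandsRequest()
# request.set_PageSize(10)
# response = client.do_action_with_exception(request)  # raw JSON bytes
# print(response)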
258 | import json
from json import JSONEncoder
from typing import List, Union, cast
from typeguard import typechecked
from arkouda.client import generic_msg
__all__ = [
"AllSymbols",
"RegisteredSymbols",
"information",
"list_registry",
"list_symbol_table",
"pretty_print_information",
]
AllSymbols = "__AllSymbols__"
RegisteredSymbols = "__RegisteredSymbols__"
def auto_str(cls):
def __str__(self):
return "%s(%s)" % (type(self).__name__, ", ".join("%s=%s" % item for item in vars(self).items()))
cls.__str__ = __str__
return cls
class EntryDecoder(JSONEncoder):
def default(self, o):
return o.__dict__
@auto_str
class InfoEntry:
def __init__(self, **kwargs) -> None:
self.name = kwargs["name"]
self.dtype = kwargs["dtype"]
self.size = kwargs["size"]
self.ndim = kwargs["ndim"]
self.shape = kwargs["shape"]
self.itemsize = kwargs["itemsize"]
self.registered = kwargs["registered"]
@typechecked
def information(names: Union[List[str], str] = RegisteredSymbols) -> str:
"""
Returns JSON formatted string containing information about the objects in names
Parameters
----------
names : Union[List[str], str]
names is either the name of an object or list of names of objects to retrieve info
if names is ak.AllSymbols, retrieves info for all symbols in the symbol table
if names is ak.RegisteredSymbols, retrieves info for all symbols in the registry
Returns
-------
str
JSON formatted string containing a list of information for each object in names
Raises
------
RuntimeError
Raised if a server-side error is thrown in the process of
retrieving information about the objects in names
"""
if isinstance(names, str):
if names in [AllSymbols, RegisteredSymbols]:
return cast(str, generic_msg(cmd="info", args={"names": names}))
else:
names = [names] # allows user to call ak.information(pda.name)
return cast(str, generic_msg(cmd="info", args={"names": json.dumps(names)}))
def list_registry(detailed: bool = False):
"""
Return a list containing the names of all registered objects
Parameters
----------
detailed: bool
Default = False
If True, return details of registered objects; currently this includes the object type for each object
Returns
-------
dict
Dict containing keys "Components" and "Objects".
Raises
------
RuntimeError
Raised if there's a server-side error thrown
"""
data = json.loads(cast(str, generic_msg(cmd="list_registry")))
objs = json.loads(data["Objects"]) if data["Objects"] != "" else []
obj_types = json.loads(data["Object_Types"]) if data["Object_Types"] != "" else []
return {
"Objects": list(zip(objs, obj_types)) if detailed else objs,
"Components": json.loads(data["Components"]),
}
def list_symbol_table() -> List[str]:
"""
Return a list containing the names of all objects in the symbol table
Parameters
----------
None
Returns
-------
list
List of all object names in the symbol table
Raises
------
RuntimeError
Raised if there's a server-side error thrown
"""
return [i.name for i in _parse_json(AllSymbols)]
def _parse_json(names: Union[List[str], str]) -> List[InfoEntry]:
"""
Internal method that converts the JSON output of information into a List of InfoEntry objects
Parameters
----------
names : Union[List[str], str]
Names to pass to information
Returns
-------
List[InfoEntry]
List of InfoEntry python objects for each name in names
Raises
------
RuntimeError
Raised if a server-side error is thrown
"""
return json.loads(information(names), object_hook=lambda d: InfoEntry(**d))
def METHOD_NAME(names: Union[List[str], str] = RegisteredSymbols) -> None:
"""
Prints verbose information for each object in names in a human-readable format
Parameters
----------
names : Union[List[str], str]
names is either the name of an object or list of names of objects to retrieve info
if names is ak.AllSymbols, retrieves info for all symbols in the symbol table
if names is ak.RegisteredSymbols, retrieves info for all symbols in the registry
Returns
-------
None
Raises
------
RuntimeError
Raised if a server-side error is thrown in the process of
retrieving information about the objects in names
"""
for i in _parse_json(names):
print(i) | null |
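# --- Usage sketch (illustrative, not part of the original module) ---
# Typical interactive use, assuming a running arkouda server; ak.connect()
# and ak.ones() are standard arkouda entry points.
#
# import arkouda as ak
# ak.connect('localhost', 5555)
# pda = ak.ones(100)
# print(ak.information(pda.name))              # JSON string for one symbol
# ak.pretty_print_information(ak.AllSymbols)   # human-readable dump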
259 | import uuid
from galaxy.jobs import (
HasResourceParameters,
JobDestination,
)
from galaxy.jobs.mapper import (
ERROR_MESSAGE_NO_RULE_FUNCTION,
ERROR_MESSAGE_RULE_FUNCTION_NOT_FOUND,
JobRunnerMapper,
)
from galaxy.util import bunch
from . import (
test_rules,
test_rules_override,
)
WORKFLOW_UUID = uuid.uuid1().hex
TOOL_JOB_DESTINATION = JobDestination()
DYNAMICALLY_GENERATED_DESTINATION = JobDestination()
def test_static_mapping():
mapper = __mapper()
assert mapper.get_job_destination({}) is TOOL_JOB_DESTINATION
def test_caching():
mapper = __mapper()
mapper.get_job_destination({})
mapper.get_job_destination({})
assert mapper.job_wrapper.tool.call_count == 1
def test_dynamic_mapping():
mapper = __mapper(__dynamic_destination(dict(function="upload")))
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
assert mapper.job_config.rule_response == "local_runner"
def test_chained_dynamic_mapping():
mapper = __mapper(__dynamic_destination(dict(function="dynamic_chain_1")))
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
assert mapper.job_config.rule_response == "final_destination"
def test_dynamic_mapping_priorities():
mapper = __mapper(__dynamic_destination(dict(function="tophat")))
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
# The next line verifies we are using the definition in 20_instance.py
# instead of 10_site.py.
assert mapper.job_config.rule_response == "instance_dest_id"
def test_dynamic_mapping_defaults_to_tool_id_as_rule():
mapper = __mapper(__dynamic_destination())
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
assert mapper.job_config.rule_response == "tool1_dest_id"
def test_dynamic_mapping_job_conf_params():
mapper = __mapper(__dynamic_destination(dict(function="check_job_conf_params", param1="7")))
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
assert mapper.job_config.rule_response == "sent_7_dest_id"
def test_dynamic_mapping_function_parameters():
mapper = __mapper(__dynamic_destination(dict(function="check_rule_params", param1="referrer_param")))
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
assert mapper.job_config.rule_response == "all_passed"
def test_dynamic_mapping_resource_parameters():
mapper = __mapper(__dynamic_destination(dict(function="check_resource_params")))
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
assert mapper.job_config.rule_response == "have_resource_params"
def test_dynamic_mapping_workflow_invocation_parameter():
mapper = __mapper(__dynamic_destination(dict(function="check_workflow_invocation_uuid")))
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
assert mapper.job_config.rule_response == WORKFLOW_UUID
def test_dynamic_mapping_no_function():
dest = __dynamic_destination(dict())
mapper = __mapper(dest)
mapper.job_wrapper.tool.all_ids = ["no_such_function"]
error_message = ERROR_MESSAGE_NO_RULE_FUNCTION % dest
__assert_mapper_errors_with_message(mapper, error_message)
def test_dynamic_mapping_missing_function():
dest = __dynamic_destination(dict(function="missing_func"))
mapper = __mapper(dest)
mapper.job_wrapper.tool.all_ids = ["no_such_function"]
error_message = ERROR_MESSAGE_RULE_FUNCTION_NOT_FOUND % ("missing_func")
__assert_mapper_errors_with_message(mapper, error_message)
def test_dynamic_mapping_rule_module_override():
mapper = __mapper(
__dynamic_destination(dict(function="rule_module_override", rules_module=test_rules_override.__name__))
)
assert mapper.get_job_destination({}) is DYNAMICALLY_GENERATED_DESTINATION
assert mapper.job_config.rule_response == "new_rules_package"
def test_dynamic_mapping_externally_set_job_destination():
mapper = __mapper(__dynamic_destination(dict(function="upload")))
# Initially, the mapper should not have a cached destination
assert not hasattr(mapper, "cached_job_destination")
# Overwrite with an externally set job destination
manually_set_destination = JobDestination(runner="dynamic")
mapper.cached_job_destination = manually_set_destination
destination = mapper.get_job_destination({})
assert destination == manually_set_destination
assert mapper.cached_job_destination == manually_set_destination
# Force overwrite with mapper determined destination
mapper.cache_job_destination(None)
assert mapper.cached_job_destination is not None
assert mapper.cached_job_destination != manually_set_destination
assert mapper.job_config.rule_response == "local_runner"
def __assert_mapper_errors_with_message(mapper, message):
exception = None
try:
mapper.get_job_destination({})
except Exception as e:
exception = e
assert exception
assert str(exception) == message, f"{str(exception)} != {message}"
def __mapper(tool_job_destination=TOOL_JOB_DESTINATION):
job_wrapper = MockJobWrapper(tool_job_destination)
job_config = MockJobConfig()
mapper = JobRunnerMapper(job_wrapper, {}, job_config)
mapper.rules_module = test_rules
return mapper
def __dynamic_destination(params=None):
params = params or {}
return JobDestination(runner="dynamic", params=params)
class MockJobConfig:
def __init__(self):
self.rule_response = None
self.dynamic_params = None
def get_destination(self, rep):
# Called to transform dynamic job destination rule response
# from destination id/runner url into a dynamic job destination.
self.rule_response = rep
return DYNAMICALLY_GENERATED_DESTINATION
class MockJobWrapper(HasResourceParameters):
def __init__(self, tool_job_destination):
self.tool = MockTool(tool_job_destination)
self.job_id = 12345
self.app = object()
def is_mock_job_wrapper(self):
return True
def METHOD_NAME(self):
raw_params = {
"threshold": 8,
"__workflow_invocation_uuid__": WORKFLOW_UUID,
}
def get_param_values(app, ignore_errors):
assert app == self.app
params = raw_params.copy()
params["__job_resource"] = {"__job_resource__select": "True", "memory": "8gb"}
return params
return bunch.Bunch(
user=bunch.Bunch(id=6789, email="[email protected]"),
raw_param_dict=lambda: raw_params,
get_param_values=get_param_values,
)
class MockTool:
def __init__(self, tool_job_destination):
self.id = "testtoolshed/devteam/tool1/23abcd13123"
self.call_count = 0
self.tool_job_destination = tool_job_destination
self.all_ids = ["testtoolshed/devteam/tool1/23abcd13123", "tool1"]
def get_job_destination(self, params):
self.call_count += 1
return self.tool_job_destination
def is_mock_tool(self):
return True | null |
260 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkimm.endpoint import endpoint_data
import json
class CreateStoryRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'imm', '2020-09-30', 'CreateStory','imm')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_CustomLabels(self): # String
return self.get_body_params().get('CustomLabels')
def set_CustomLabels(self, CustomLabels): # String
self.add_body_params('CustomLabels', CustomLabels)
def get_Notification(self): # Struct
return self.get_query_params().get('Notification')
def set_Notification(self, Notification): # Struct
self.add_query_param("Notification", json.dumps(Notification))
def get_ProjectName(self): # String
return self.get_body_params().get('ProjectName')
def set_ProjectName(self, ProjectName): # String
self.add_body_params('ProjectName', ProjectName)
def get_NotifyTopicName(self): # String
return self.get_body_params().get('NotifyTopicName')
def set_NotifyTopicName(self, NotifyTopicName): # String
self.add_body_params('NotifyTopicName', NotifyTopicName)
def get_StoryType(self): # String
return self.get_body_params().get('StoryType')
def METHOD_NAME(self, StoryType): # String
self.add_body_params('StoryType', StoryType)
def get_Tags(self): # Map
return self.get_query_params().get('Tags')
def set_Tags(self, Tags): # Map
self.add_query_param("Tags", json.dumps(Tags))
def get_StorySubType(self): # String
return self.get_body_params().get('StorySubType')
def set_StorySubType(self, StorySubType): # String
self.add_body_params('StorySubType', StorySubType)
def get_MinFileCount(self): # Long
return self.get_body_params().get('MinFileCount')
def set_MinFileCount(self, MinFileCount): # Long
self.add_body_params('MinFileCount', MinFileCount)
def get_UserData(self): # String
return self.get_query_params().get('UserData')
def set_UserData(self, UserData): # String
self.add_query_param('UserData', UserData)
def get_MaxFileCount(self): # Long
return self.get_body_params().get('MaxFileCount')
def set_MaxFileCount(self, MaxFileCount): # Long
self.add_body_params('MaxFileCount', MaxFileCount)
def get_DatasetName(self): # String
return self.get_body_params().get('DatasetName')
def set_DatasetName(self, DatasetName): # String
self.add_body_params('DatasetName', DatasetName)
def get_StoryStartTime(self): # String
return self.get_body_params().get('StoryStartTime')
def set_StoryStartTime(self, StoryStartTime): # String
self.add_body_params('StoryStartTime', StoryStartTime)
def get_Address(self): # Struct
return self.get_body_params().get('Address')
def set_Address(self, Address): # Struct
self.add_body_params("Address", json.dumps(Address))
def get_CustomId(self): # String
return self.get_body_params().get('CustomId')
def set_CustomId(self, CustomId): # String
self.add_body_params('CustomId', CustomId)
def get_StoryEndTime(self): # String
return self.get_body_params().get('StoryEndTime')
def set_StoryEndTime(self, StoryEndTime): # String
self.add_body_params('StoryEndTime', StoryEndTime)
def get_ObjectId(self): # String
return self.get_body_params().get('ObjectId')
def set_ObjectId(self, ObjectId): # String
self.add_body_params('ObjectId', ObjectId)
def get_StoryName(self): # String
return self.get_body_params().get('StoryName')
def set_StoryName(self, StoryName): # String
self.add_body_params('StoryName', StoryName) | null |
261 | # Copyright 2017-2022 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from future.utils import iteritems
from src.api.base import API
from src.model.metadata_model import MetadataModel
class Metadata(API):
def __init__(self):
super(Metadata, self).__init__()
@classmethod
def find(cls, identifier, entity_class):
api = cls.instance()
response_data = api.call('metadata/find?entityName={}&entityClass={}'.format(identifier,
str(entity_class).upper()), None)
if 'payload' in response_data:
return MetadataModel.METHOD_NAME(response_data['payload'])
if 'status' in response_data and response_data['status'] == "OK":
return MetadataModel()
if 'message' in response_data:
raise RuntimeError(response_data['message'])
else:
raise RuntimeError("Failed to find entity id by entity name.")
@classmethod
def METHOD_NAME(cls, entity_id, entity_class):
return cls.load_all_for_ids([entity_id], entity_class)[0]
@classmethod
def load_all_for_ids(cls, entity_ids, entity_class):
api = cls.instance()
data = json.dumps([cls.convert_to_entity_vo(entity_id, entity_class) for entity_id in entity_ids])
response_data = api.call('metadata/load', data=data, http_method='POST')
if 'payload' in response_data:
return [MetadataModel.METHOD_NAME(response_data_item) for response_data_item in response_data['payload']]
if 'status' in response_data and response_data['status'] == "OK":
return [MetadataModel()]
if 'message' in response_data:
raise RuntimeError(response_data['message'])
else:
raise RuntimeError("Failed to load metadata.")
@classmethod
def load_metadata_mapping(cls, entity_ids, entity_class):
metadata_list = Metadata.load_all_for_ids(entity_ids, entity_class)
metadata_mapping = dict()
for metadata_entry in metadata_list:
metadata_data_dict = {}
for key, data in iteritems(metadata_entry.data):
if 'value' in data:
value = data['value']
if not value.startswith('{'):
metadata_data_dict[key] = value
if len(metadata_data_dict):
metadata_mapping[metadata_entry.entity_id] = metadata_data_dict
return metadata_mapping
@classmethod
def convert_to_entity_vo(cls, entity_id, entity_class):
return {
'entityId': entity_id,
'entityClass': str(entity_class).upper()
}
@classmethod
def update(cls, entity_id, entity_class, metadata):
api = cls.instance()
data = json.dumps({
"entity": {
"entityId": entity_id,
"entityClass": str(entity_class).upper()
},
"data": metadata
})
response_data = api.call('metadata/updateKeys', data=data, http_method='POST')
if 'payload' in response_data:
return MetadataModel.METHOD_NAME(response_data['payload'])
if 'status' in response_data and response_data['status'] == "OK":
return MetadataModel()
if 'message' in response_data:
raise RuntimeError(response_data['message'])
else:
raise RuntimeError("Failed to update metadata.")
@classmethod
def delete(cls, entity_id, entity_class):
api = cls.instance()
data = json.dumps({
'entityId': entity_id,
'entityClass': str(entity_class).upper()
})
response_data = api.call('metadata/delete', data=data, http_method='DELETE')
if 'payload' in response_data:
return MetadataModel.METHOD_NAME(response_data['payload'])
if 'status' in response_data and response_data['status'] == "OK":
return MetadataModel()
if 'message' in response_data:
raise RuntimeError(response_data['message'])
else:
raise RuntimeError("Failed to delete metadata.")
@classmethod
def delete_keys(cls, entity_id, entity_class, metadata):
api = cls.instance()
data = json.dumps({
"entity": {
"entityId": entity_id,
"entityClass": str(entity_class).upper()
},
"data": metadata
})
response_data = api.call('metadata/deleteKeys', data=data, http_method='DELETE')
if 'payload' in response_data:
return MetadataModel.METHOD_NAME(response_data['payload'])
if 'status' in response_data and response_data['status'] == "OK":
return MetadataModel()
if 'message' in response_data:
raise RuntimeError(response_data['message'])
else:
raise RuntimeError("Failed to delete metadata keys.") | null |
262 | import pytest
import env # noqa: F401
from pybind11_tests import ConstructorStats
from pybind11_tests import call_policies as m
@pytest.mark.xfail("env.PYPY", reason="sometimes comes out 1 off on PyPy", strict=False)
def test_keep_alive_argument(capture):
n_inst = ConstructorStats.detail_reg_inst()
with capture:
p = m.Parent()
assert capture == "Allocating parent."
with capture:
p.addChild(m.Child())
assert ConstructorStats.detail_reg_inst() == n_inst + 1
assert (
capture
== """
Allocating child.
Releasing child.
"""
)
with capture:
del p
assert ConstructorStats.detail_reg_inst() == n_inst
assert capture == "Releasing parent."
with capture:
p = m.Parent()
assert capture == "Allocating parent."
with capture:
p.addChildKeepAlive(m.Child())
assert ConstructorStats.detail_reg_inst() == n_inst + 2
assert capture == "Allocating child."
with capture:
del p
assert ConstructorStats.detail_reg_inst() == n_inst
assert (
capture
== """
Releasing parent.
Releasing child.
"""
)
p = m.Parent()
c = m.Child()
assert ConstructorStats.detail_reg_inst() == n_inst + 2
m.free_function(p, c)
del c
assert ConstructorStats.detail_reg_inst() == n_inst + 2
del p
assert ConstructorStats.detail_reg_inst() == n_inst
with pytest.raises(RuntimeError) as excinfo:
m.invalid_arg_index()
assert str(excinfo.value) == "Could not activate keep_alive!"
def test_keep_alive_return_value(capture):
n_inst = ConstructorStats.detail_reg_inst()
with capture:
p = m.Parent()
assert capture == "Allocating parent."
with capture:
p.returnChild()
assert ConstructorStats.detail_reg_inst() == n_inst + 1
assert (
capture
== """
Allocating child.
Releasing child.
"""
)
with capture:
del p
assert ConstructorStats.detail_reg_inst() == n_inst
assert capture == "Releasing parent."
with capture:
p = m.Parent()
assert capture == "Allocating parent."
with capture:
p.returnChildKeepAlive()
assert ConstructorStats.detail_reg_inst() == n_inst + 2
assert capture == "Allocating child."
with capture:
del p
assert ConstructorStats.detail_reg_inst() == n_inst
assert (
capture
== """
Releasing parent.
Releasing child.
"""
)
p = m.Parent()
assert ConstructorStats.detail_reg_inst() == n_inst + 1
with capture:
m.Parent.staticFunction(p)
assert ConstructorStats.detail_reg_inst() == n_inst + 2
assert capture == "Allocating child."
with capture:
del p
assert ConstructorStats.detail_reg_inst() == n_inst
assert (
capture
== """
Releasing parent.
Releasing child.
"""
)
# https://foss.heptapod.net/pypy/pypy/-/issues/2447
@pytest.mark.xfail("env.PYPY", reason="_PyObject_GetDictPtr is unimplemented")
def test_alive_gc(capture):
n_inst = ConstructorStats.detail_reg_inst()
p = m.ParentGC()
p.addChildKeepAlive(m.Child())
assert ConstructorStats.detail_reg_inst() == n_inst + 2
lst = [p]
lst.append(lst) # creates a circular reference
with capture:
del p, lst
assert ConstructorStats.detail_reg_inst() == n_inst
assert (
capture
== """
Releasing parent.
Releasing child.
"""
)
def test_alive_gc_derived(capture):
class Derived(m.Parent):
pass
n_inst = ConstructorStats.detail_reg_inst()
p = Derived()
p.addChildKeepAlive(m.Child())
assert ConstructorStats.detail_reg_inst() == n_inst + 2
lst = [p]
lst.append(lst) # creates a circular reference
with capture:
del p, lst
assert ConstructorStats.detail_reg_inst() == n_inst
assert (
capture
== """
Releasing parent.
Releasing child.
"""
)
def test_alive_gc_multi_derived(capture):
class Derived(m.Parent, m.Child):
def __init__(self):
m.Parent.__init__(self)
m.Child.__init__(self)
n_inst = ConstructorStats.detail_reg_inst()
p = Derived()
p.addChildKeepAlive(m.Child())
# +3 rather than +2 because Derived corresponds to two registered instances
assert ConstructorStats.detail_reg_inst() == n_inst + 3
lst = [p]
lst.append(lst) # creates a circular reference
with capture:
del p, lst
assert ConstructorStats.detail_reg_inst() == n_inst
assert (
capture
== """
Releasing parent.
Releasing child.
Releasing child.
"""
)
def METHOD_NAME(capture):
n_inst = ConstructorStats.detail_reg_inst()
with capture:
p = m.Parent()
assert capture == "Allocating parent."
with capture:
p.returnNullChildKeepAliveChild()
assert ConstructorStats.detail_reg_inst() == n_inst + 1
assert capture == ""
with capture:
del p
assert ConstructorStats.detail_reg_inst() == n_inst
assert capture == "Releasing parent."
with capture:
p = m.Parent()
assert capture == "Allocating parent."
with capture:
p.returnNullChildKeepAliveParent()
assert ConstructorStats.detail_reg_inst() == n_inst + 1
assert capture == ""
with capture:
del p
assert ConstructorStats.detail_reg_inst() == n_inst
assert capture == "Releasing parent."
def test_keep_alive_constructor(capture):
n_inst = ConstructorStats.detail_reg_inst()
with capture:
p = m.Parent(m.Child())
assert ConstructorStats.detail_reg_inst() == n_inst + 2
assert (
capture
== """
Allocating child.
Allocating parent.
"""
)
with capture:
del p
assert ConstructorStats.detail_reg_inst() == n_inst
assert (
capture
== """
Releasing parent.
Releasing child.
"""
)
def test_call_guard():
assert m.unguarded_call() == "unguarded"
assert m.guarded_call() == "guarded"
assert m.multiple_guards_correct_order() == "guarded & guarded"
assert m.multiple_guards_wrong_order() == "unguarded & guarded"
if hasattr(m, "with_gil"):
assert m.with_gil() == "GIL held"
assert m.without_gil() == "GIL released" | null |
263 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RoaRequest
from aliyunsdkedas.endpoint import endpoint_data
class ModifyScalingRuleRequest(RoaRequest):
def __init__(self):
RoaRequest.__init__(self, 'Edas', '2017-08-01', 'ModifyScalingRule','Edas')
self.set_uri_pattern('/pop/v5/app/scaling_rules')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_InStep(self): # Integer
return self.get_query_params().get('InStep')
def set_InStep(self, InStep): # Integer
self.add_query_param('InStep', InStep)
def get_OutInstanceNum(self): # Integer
return self.get_query_params().get('OutInstanceNum')
def set_OutInstanceNum(self, OutInstanceNum): # Integer
self.add_query_param('OutInstanceNum', OutInstanceNum)
def get_OutRT(self): # Integer
return self.get_query_params().get('OutRT')
def set_OutRT(self, OutRT): # Integer
self.add_query_param('OutRT', OutRT)
def get_InInstanceNum(self): # Integer
return self.get_query_params().get('InInstanceNum')
def set_InInstanceNum(self, InInstanceNum): # Integer
self.add_query_param('InInstanceNum', InInstanceNum)
def get_VSwitchIds(self): # String
return self.get_query_params().get('VSwitchIds')
def set_VSwitchIds(self, VSwitchIds): # String
self.add_query_param('VSwitchIds', VSwitchIds)
def get_TemplateInstanceId(self): # String
return self.get_query_params().get('TemplateInstanceId')
def set_TemplateInstanceId(self, TemplateInstanceId): # String
self.add_query_param('TemplateInstanceId', TemplateInstanceId)
def get_AcceptEULA(self): # Boolean
return self.get_query_params().get('AcceptEULA')
def set_AcceptEULA(self, AcceptEULA): # Boolean
self.add_query_param('AcceptEULA', AcceptEULA)
def get_OutStep(self): # Integer
return self.get_query_params().get('OutStep')
def set_OutStep(self, OutStep): # Integer
self.add_query_param('OutStep', OutStep)
def get_OutCPU(self): # Integer
return self.get_query_params().get('OutCPU')
def set_OutCPU(self, OutCPU): # Integer
self.add_query_param('OutCPU', OutCPU)
def get_KeyPairName(self): # String
return self.get_query_params().get('KeyPairName')
def set_KeyPairName(self, KeyPairName): # String
self.add_query_param('KeyPairName', KeyPairName)
def get_Password(self): # String
return self.get_query_params().get('Password')
def set_Password(self, Password): # String
self.add_query_param('Password', Password)
def get_TemplateVersion(self): # Integer
return self.get_query_params().get('TemplateVersion')
def set_TemplateVersion(self, TemplateVersion): # Integer
self.add_query_param('TemplateVersion', TemplateVersion)
def get_InCondition(self): # String
return self.get_query_params().get('InCondition')
def set_InCondition(self, InCondition): # String
self.add_query_param('InCondition', InCondition)
def get_InRT(self): # Integer
return self.get_query_params().get('InRT')
def set_InRT(self, InRT): # Integer
self.add_query_param('InRT', InRT)
def get_InCpu(self): # Integer
return self.get_query_params().get('InCpu')
def set_InCpu(self, InCpu): # Integer
self.add_query_param('InCpu', InCpu)
def get_OutDuration(self): # Integer
return self.get_query_params().get('OutDuration')
def set_OutDuration(self, OutDuration): # Integer
self.add_query_param('OutDuration', OutDuration)
def get_MultiAzPolicy(self): # String
return self.get_query_params().get('MultiAzPolicy')
def set_MultiAzPolicy(self, MultiAzPolicy): # String
self.add_query_param('MultiAzPolicy', MultiAzPolicy)
def get_OutLoad(self): # Integer
return self.get_query_params().get('OutLoad')
def set_OutLoad(self, OutLoad): # Integer
self.add_query_param('OutLoad', OutLoad)
def get_InLoad(self): # Integer
return self.get_query_params().get('InLoad')
def set_InLoad(self, InLoad): # Integer
self.add_query_param('InLoad', InLoad)
def get_GroupId(self): # String
return self.get_query_params().get('GroupId')
def set_GroupId(self, GroupId): # String
self.add_query_param('GroupId', GroupId)
def get_ResourceFrom(self): # String
return self.get_query_params().get('ResourceFrom')
def set_ResourceFrom(self, ResourceFrom): # String
self.add_query_param('ResourceFrom', ResourceFrom)
def get_OutEnable(self): # Boolean
return self.get_query_params().get('OutEnable')
def set_OutEnable(self, OutEnable): # Boolean
self.add_query_param('OutEnable', OutEnable)
def get_TemplateId(self): # String
return self.get_query_params().get('TemplateId')
def set_TemplateId(self, TemplateId): # String
self.add_query_param('TemplateId', TemplateId)
def get_ScalingPolicy(self): # String
return self.get_query_params().get('ScalingPolicy')
def set_ScalingPolicy(self, ScalingPolicy): # String
self.add_query_param('ScalingPolicy', ScalingPolicy)
def METHOD_NAME(self): # String
return self.get_query_params().get('OutCondition')
def set_OutCondition(self, OutCondition): # String
self.add_query_param('OutCondition', OutCondition)
def get_InDuration(self): # Integer
return self.get_query_params().get('InDuration')
def set_InDuration(self, InDuration): # Integer
self.add_query_param('InDuration', InDuration)
def get_InEnable(self): # Boolean
return self.get_query_params().get('InEnable')
def set_InEnable(self, InEnable): # Boolean
self.add_query_param('InEnable', InEnable)
def get_AppId(self): # String
return self.get_query_params().get('AppId')
def set_AppId(self, AppId): # String
self.add_query_param('AppId', AppId)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_TemplateInstanceName(self): # String
return self.get_query_params().get('TemplateInstanceName')
def set_TemplateInstanceName(self, TemplateInstanceName): # String
self.add_query_param('TemplateInstanceName', TemplateInstanceName) | null |
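# --- Note (illustrative, not part of the original file) ---
# Unlike RPC-style requests, this ROA-style request is routed by the URI
# pattern set in __init__ rather than by an Action query parameter; it is
# still executed the same way through the core client:
#
# from aliyunsdkcore.client import AcsClient
#
# client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
# request = ModifyScalingRuleRequest()
# request.set_AppId('<app-id>')
# request.set_GroupId('<group-id>')
# print(client.do_action_with_exception(request))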
264 | import logging
import os
from flask_appbuilder import SQLA
from flask_appbuilder.models.sqla.interface import SQLAInterface
from .base import FABTestCase
from .const import MAX_PAGE_SIZE, PASSWORD_ADMIN, USERNAME_ADMIN
from .sqla.models import Model1
log = logging.getLogger(__name__)
class FlaskTestCase(FABTestCase):
def setUp(self):
from flask import Flask
from flask_appbuilder import AppBuilder
from flask_appbuilder.views import ModelView
self.app = Flask(__name__)
self.basedir = os.path.abspath(os.path.dirname(__file__))
self.app.config.from_object("tests.config_api")
self.app.config["FAB_API_MAX_PAGE_SIZE"] = MAX_PAGE_SIZE
self.db = SQLA(self.app)
self.appbuilder = AppBuilder(self.app, self.db.session)
class Model1View(ModelView):
datamodel = SQLAInterface(Model1)
context = self
context._conditional_value = True
class Model1ViewDynamic(ModelView):
datamodel = SQLAInterface(Model1)
self.appbuilder.add_view(Model1View, "Model1")
self.appbuilder.add_view(
Model1ViewDynamic,
"Model1Dynamic",
label="Model1 Dynamic",
menu_cond=lambda: context._conditional_value,
)
def tearDown(self):
self.appbuilder = None
self.app = None
self.db = None
def test_menu_access_denied(self):
"""
REST Api: Test that menu access is denied when logged out
:return:
"""
uri = "/api/v1/menu/"
client = self.app.test_client()
# as logged out user
rv = client.get(uri)
self.assertEqual(rv.status_code, 401)
def test_menu_api(self):
"""
REST Api: Test menu data
"""
uri = "/api/v1/menu/"
client = self.app.test_client()
# Enable Model1Dynamic
self._conditional_value = True
token = self.login(client, USERNAME_ADMIN, PASSWORD_ADMIN)
rv = self.auth_client_get(client, token, uri)
self.assertEqual(rv.status_code, 200)
data = rv.data.decode("utf-8")
self.assertIn("Security", data)
self.assertIn("Model1", data)
self.assertIn("Model1Dynamic", data)
def test_menu_api_limited(self):
"""
REST Api: Test limited menu data
"""
limited_user = "user1"
limited_password = "user1"
limited_role = "Limited"
role = self.appbuilder.sm.add_role(limited_role)
pvm = self.appbuilder.sm.find_permission_view_menu("menu_access", "Model1")
self.appbuilder.sm.add_permission_role(role, pvm)
pvm = self.appbuilder.sm.find_permission_view_menu(
"menu_access", "Model1Dynamic"
)
self.appbuilder.sm.add_permission_role(role, pvm)
pvm = self.appbuilder.sm.find_permission_view_menu("can_get", "MenuApi")
self.appbuilder.sm.add_permission_role(role, pvm)
self.appbuilder.sm.add_user(
limited_user, "user1", "user1", "[email protected]", role, limited_password
)
uri = "/api/v1/menu/"
client = self.app.test_client()
# as limited user
token = self.login(client, limited_user, limited_password)
# Enable Model1Dynamic
self._conditional_value = True
rv = self.auth_client_get(client, token, uri)
self.assertEqual(rv.status_code, 200)
data = rv.data.decode("utf-8")
self.assertNotIn("Security", data)
self.assertIn("Model1", data)
self.assertIn("Model1Dynamic", data)
# Disable Model1Dynamic
self._conditional_value = False
rv = self.auth_client_get(client, token, uri)
self.assertEqual(rv.status_code, 200)
data = rv.data.decode("utf-8")
self.assertNotIn("Security", data)
self.assertIn("Model1", data)
self.assertNotIn("Model1Dynamic", data)
self.browser_logout(client)
# Revert test data
self.appbuilder.get_session.delete(
self.appbuilder.sm.find_user(username=limited_user)
)
self.appbuilder.get_session.delete(self.appbuilder.sm.find_role(limited_role))
self.appbuilder.get_session.commit()
def test_menu_api_public(self):
"""
REST Api: Test public menu data
"""
role = self.appbuilder.sm.find_role("Public")
pvm = self.appbuilder.sm.find_permission_view_menu("menu_access", "Model1")
self.appbuilder.sm.add_permission_role(role, pvm)
pvm = self.appbuilder.sm.find_permission_view_menu(
"menu_access", "Model1Dynamic"
)
self.appbuilder.sm.add_permission_role(role, pvm)
pvm = self.appbuilder.sm.find_permission_view_menu("can_get", "MenuApi")
self.appbuilder.sm.add_permission_role(role, pvm)
# Enable Model1Dynamic
self._conditional_value = True
uri = "/api/v1/menu/"
client = self.app.test_client()
rv = client.get(uri)
self.assertEqual(rv.status_code, 200)
data = rv.data.decode("utf-8")
self.assertIn("Model1", data)
self.assertIn("Model1Dynamic", data)
# Disable Model1Dynamic
self._conditional_value = False
uri = "/api/v1/menu/"
client = self.app.test_client()
rv = client.get(uri)
self.assertEqual(rv.status_code, 200)
data = rv.data.decode("utf-8")
self.assertIn("Model1", data)
self.assertNotIn("Model1Dynamic", data)
# Revert test data
role = self.appbuilder.sm.find_role("Public")
role.permissions = []
self.appbuilder.get_session.commit()
def METHOD_NAME(self):
"""
REST Api: Test redirect after logout
"""
limited_user = "user1"
limited_password = "user1"
client = self.app.test_client()
self.login(client, limited_user, limited_password)
rv = self.browser_logout(client)
# make sure that browser is redirected to /
self.assertEqual(rv.headers["Location"].split("/")[-1], "")
self.login(client, limited_user, limited_password)
self.app.config["LOGOUT_REDIRECT_URL"] = "/logged_out"
rv = self.browser_logout(client)
# make sure that browser is redirected to LOGOUT_REDIRECT_URL
self.assertEqual(rv.headers["Location"].split("/")[-1], "logged_out") | null |
265 | from __future__ import print_function
import numpy as np
try:
import scipy.special
except ImportError:
scipy = None
import IMP
import IMP.test
import IMP.algebra
import pickle
class UnitSimplexDTests(IMP.test.TestCase):
types = [
(1, IMP.algebra.UnitSimplex1D, (), IMP.algebra.Vector1D),
(2, IMP.algebra.UnitSimplex2D, (), IMP.algebra.Vector2D),
(3, IMP.algebra.UnitSimplex3D, (), IMP.algebra.Vector3D),
(4, IMP.algebra.UnitSimplex4D, (), IMP.algebra.Vector4D),
(5, IMP.algebra.UnitSimplex5D, (), IMP.algebra.Vector5D),
(6, IMP.algebra.UnitSimplex6D, (), IMP.algebra.Vector6D),
]
types += [
(d, IMP.algebra.UnitSimplexKD, (d,), IMP.algebra.VectorKD)
for d in range(1, 11)
]
@staticmethod
def METHOD_NAME(tailprob, sigma=1, dim=1):
alpha = (1 - tailprob) ** dim
return sigma * np.sqrt(2) * scipy.special.erfinv(alpha)
def test_construction(self):
"""Check that fixed-dimension simplices are constructed correctly"""
for d, st, args, vt in self.types:
s = st(*args)
self.assertEqual(s.get_dimension(), d)
def test_construct_kd_with_wrong_dimension_raises_error(self):
self.assertRaisesUsageException(IMP.algebra.UnitSimplexKD, 0)
self.assertRaisesUsageException(IMP.algebra.UnitSimplexKD, -1)
def test_get_barycenter(self):
for d, st, args, vt in self.types:
s = st(*args)
v = s.get_barycenter()
self.assertIsInstance(v, vt)
self.assertSequenceAlmostEqual(list(s.get_barycenter()), [1.0 / d] * d)
def test_get_contains(self):
for d, st, args, vt in self.types:
s = st(*args)
for i in range(10):
if isinstance(vt, IMP.algebra.VectorKD):
v = -np.log(np.random.uniform(size=d + 1))
v /= np.sum(v)
self.assertFalse(s.get_contains(vt(v)))
v = -np.log(np.random.uniform(size=d))
self.assertFalse(s.get_contains(vt(v)))
v /= np.sum(v)
self.assertTrue(s.get_contains(vt(v)))
def test_get_vertices(self):
for d, st, args, vt in self.types:
s = st(*args)
vs = IMP.algebra.get_vertices(s)
I = np.eye(d)
self.assertEqual(len(vs), d)
for i, v in enumerate(vs):
self.assertIsInstance(v, vt)
self.assertSequenceAlmostEqual(list(v), list(I[i, :]))
def test_get_increasing_from_embedded(self):
for d, st, args, vt in self.types:
s = st(*args)
for i in range(10):
v = -np.log(np.random.uniform(size=d))
v /= np.sum(v)
inc = IMP.algebra.get_increasing_from_embedded(s, vt(v))
self.assertIsInstance(inc, vt)
self.assertSequenceAlmostEqual(list(inc), list(np.cumsum(v)))
def test_get_embedded_from_increasing(self):
for d, st, args, vt in self.types:
s = st(*args)
for i in range(10):
v = -np.log(np.random.uniform(size=d))
v /= np.sum(v)
inc = np.cumsum(v)
v2 = IMP.algebra.get_embedded_from_increasing(s, vt(inc))
self.assertIsInstance(v2, vt)
self.assertSequenceAlmostEqual(list(v2), list(v))
def test_get_projected(self):
for d, st, args, vt in self.types:
s = st(*args)
v = np.random.normal(size=d)
v_proj = IMP.algebra.get_projected(s, vt(v))
self.assertIsInstance(v_proj, vt)
v_proj = np.array(v_proj, dtype=np.double)
pos_inds = v_proj != 0.0
vshift = v[pos_inds] - v_proj[pos_inds]
self.assertTrue(np.all(v_proj >= 0))
self.assertAlmostEqual(np.sum(v_proj), 1)
# projection has cut point
if len(v[~pos_inds]) > 0:
min_pos = np.amin(v[pos_inds])
max_zero = np.amax(v[~pos_inds])
self.assertGreater(min_pos, max_zero)
# projection is rigid shift
self.assertSequenceAlmostEqual(
list(vshift), [vshift[0]] * len(vshift)
)
def test_get_random_vector_on(self):
for d, st, args, vt in self.types:
s = st(*args)
for i in range(10):
v = IMP.algebra.get_random_vector_on(s)
self.assertIsInstance(v, vt)
self.assertEqual(v.get_dimension(), d)
print(v)
print(np.sum(list(v)))
self.assertAlmostEqual(np.sum(v), 1)
@IMP.test.skipIf(scipy is None, "Requires SciPy")
def test_get_random_vector_on_is_uniform(self):
"""Test that result of get_random_vector_on is uniform on simplex.
Checks that each component of the Monte Carlo estimate of the mean
follows the central limit theorem.
"""
n = 1000
fail_prob = 1e-3 # Probability of all tests failing.
each_fail_prob = 1 - (1 - fail_prob) ** (1.0 / len(self.types))
for d, st, args, vt in self.types:
s = st(*args)
bary_vs = []
c = s.get_barycenter()
for i in range(n):
v = IMP.algebra.get_random_vector_on(s)
bary_vs.append(np.array(v - c, dtype=np.double))
if scipy:
mean_bary_vs = np.mean(bary_vs, axis=0)
mcse = ((d - 1.0) / (d + 1.0) / n) ** 0.5 / d
mean_thresh = self.METHOD_NAME(
each_fail_prob, dim=d, sigma=mcse
)
for i in range(d):
self.assertLessEqual(mean_bary_vs[i], mean_thresh)
def test_pickle(self):
"""Test (un-)pickle of UnitSimplexD"""
for d, st, args, vt in self.types:
s1 = st(*args)
s2 = st(*args)
s2.foo = 'bar'
dump = pickle.dumps((s1, s2))
news1, news2 = pickle.loads(dump)
self.assertLess(IMP.algebra.get_distance(
s1.get_barycenter(), news1.get_barycenter()), 1e-4)
self.assertLess(IMP.algebra.get_distance(
s2.get_barycenter(), news2.get_barycenter()), 1e-4)
self.assertEqual(news2.foo, 'bar')
self.assertRaises(TypeError, s1._set_from_binary, 42)
if __name__ == "__main__":
IMP.test.main() | null |
266 | import traceback
from shared.regular.regular_api import *
from shared.connection.connectors.connectors_base import Connector, with_connection
from shared.regular import regular_log
from pymongo import MongoClient
from bson import ObjectId
from typing import Optional
def with_mongodb_exception_handler(f):
def wrapper(*args):
log = regular_log.default()
try:
return f(*args)
except Exception as e:
log['error']['exception_details'] = str(e)
return {'log': log}
return wrapper
class MongoDBConnector(Connector):
url_signer_service: Optional[str]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.name = "mongo_db"
def connect(self):
log = regular_log.default()
try:
if 'client_secret' not in self.auth_data:
log['error']['client_secret'] = 'auth_data must provide a client_secret.'
return {'log': log}
self.connection_client = MongoClient(self.auth_data.get('client_secret'))
return {'result': True}
except Exception as e:
log['error']['auth_credentials'] = 'Error connecting to MongoDB. Please check your connection URL is correct.'
return {'log': log}
@with_connection
@with_mongodb_exception_handler
def __get_db_list(self, opts):
"""
:param opts:
:return:
"""
# List the available databases
database_names = self.connection_client.list_database_names()
# Return the database names as a list
return {'data': list(database_names)}
@with_connection
@with_mongodb_exception_handler
def __list_collections_from_db(self, opts):
"""Upload a file to diffgram from an S3 bucket
:param s3_file_key: path of file to fetch from
:return: file obj if file was uploaded, else False
"""
spec_list = [{'db_name': str}]
log = regular_log.default()
log, input = regular_input.input_check_many(untrusted_input = opts,
spec_list = spec_list,
log = log)
if len(log["error"].keys()) >= 1:
return {'log': log}
database = input['db_name']
# Select the database
db = self.connection_client[database]
# List the collections (tables) in the database
collection_names = db.list_collection_names()
return {'data': collection_names}
@with_connection
@with_mongodb_exception_handler
def __get_documents(self, opts):
"""
Get documents from a collection
:param opts:
:return:
"""
spec_list = [{'db_name': str},
{'collection_name': str},
{'exclude_id_list': {'required': False, 'type': list, 'allow_empty': True}},
{'reference_id': str}]
log = regular_log.default()
log, input = regular_input.input_check_many(untrusted_input = opts,
spec_list = spec_list,
log = log)
if len(log["error"].keys()) >= 1:
return {'log': log}
database = input['db_name']
collection_name = input['collection_name']
exclude_id_list = input['exclude_id_list']
reference_id = input['reference_id']
# Select the database
db = self.connection_client[database]
collection = db[collection_name]
query = {}
if exclude_id_list:
if reference_id == '_id':
exclude_id_list = [ObjectId(id) for id in exclude_id_list]
query[reference_id] = {'$nin': exclude_id_list}
items = collection.find(query)
return {'data': list(items)}
def METHOD_NAME(self):
auth_result = self.connect()
if 'log' in auth_result:
return auth_result
try:
# Test mongo db connection with a small admin command
self.connection_client.admin.command('ismaster')
except Exception as e:
log = regular_log.default()
log['error']['connection'] = 'Error connecting to MongoDB. Please check your connection URL is correct.'
log['error']['details'] = traceback.format_exc()
return {'log': log}
return auth_result
@with_connection
def get_meta_data(self):
return {}
@with_connection
def fetch_data(self, opts):
"""
This function routes any action_type to the correct MongoDB connector action.
:return: Object
"""
if 'action_type' not in opts:
raise Exception('Provide action_type key.')
if 'event_data' not in opts:
raise Exception('Provide event_data key.')
action_type = opts.pop('action_type')
if action_type == 'get_db_list':
return self.__get_db_list(opts)
if action_type == 'list_collections_from_db':
return self.__list_collections_from_db(opts)
if action_type == 'get_documents':
return self.__get_documents(opts)
@with_connection
def put_data(self, opts):
if 'action_type' not in opts:
raise Exception('Provide action_type key.')
if 'event_data' not in opts:
raise Exception('Provide event_data key.')
action_type = opts.pop('action_type')
if action_type == 'send_export':
return self.__send_export(opts) | null |
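# --- Usage sketch (illustrative, not part of the original module) ---
# The connector is driven through fetch_data() with an action_type plus an
# event_data key; auth_data (holding client_secret) is assumed to be set up
# by the Connector base class.
#
# connector = MongoDBConnector()   # auth_data supplied by the base class
# connector.connect()
# dbs = connector.fetch_data({'action_type': 'get_db_list', 'event_data': {}})
# docs = connector.fetch_data({
#     'action_type': 'get_documents',
#     'event_data': {},
#     'db_name': 'mydb',
#     'collection_name': 'items',
#     'exclude_id_list': [],
#     'reference_id': '_id',
# })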
267 | """
Tool Input Translation.
"""
import logging
from galaxy.util.bunch import Bunch
log = logging.getLogger(__name__)
class ToolInputTranslator:
"""
Handles Tool input translation.
This is used for data source tools
>>> from galaxy.util import Params, XML
>>> translator = ToolInputTranslator.from_element(XML(
... '''
... <request_param_translation>
... <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
... <request_param galaxy_name="URL" remote_name="URL" missing="" >
... <append_param separator="&" first_separator="?" join="=">
... <value name="_export" missing="1" />
... <value name="GALAXY_URL" missing="0" />
... </append_param>
... </request_param>
... <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
... <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
... <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
... <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
... <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="tabular" >
... <value_translation>
... <value galaxy_value="tabular" remote_value="primaryTable" />
... <value galaxy_value="tabular" remote_value="selectedFields" />
... <value galaxy_value="wig" remote_value="wigData" />
... <value galaxy_value="interval" remote_value="tab" />
... <value galaxy_value="html" remote_value="hyperlinks" />
... <value galaxy_value="fasta" remote_value="sequence" />
... </value_translation>
... </request_param>
... </request_param_translation>
... '''))
>>> params = Params({'db':'hg17', 'URL':'URL_value', 'org':'Human', 'hgta_outputType':'primaryTable'})
>>> translator.translate(params)
>>> print(sorted(params.__dict__.keys()))
['URL', 'URL_method', 'data_type', 'db', 'dbkey', 'description', 'hgta_outputType', 'org', 'organism', 'table']
>>> params.get('URL', None) in ['URL_value?GALAXY_URL=0&_export=1', 'URL_value?_export=1&GALAXY_URL=0']
True
"""
@classmethod
def from_element(cls, elem):
"""Loads the proper filter by the type attribute of elem"""
rval = ToolInputTranslator()
for req_param in elem.findall("request_param"):
# req_param tags must look like <request_param galaxy_name="dbkey" remote_name="GENOME" missing="" />
# trans_list = []
remote_name = req_param.get("remote_name")
galaxy_name = req_param.get("galaxy_name")
missing = req_param.get("missing")
value_trans = {}
append_param = None
value_trans_elem = req_param.find("value_translation")
if value_trans_elem is not None:
for value_elem in value_trans_elem.findall("value"):
remote_value = value_elem.get("remote_value")
galaxy_value = value_elem.get("galaxy_value")
if None not in [remote_value, galaxy_value]:
value_trans[remote_value] = galaxy_value
append_param_elem = req_param.find("append_param")
if append_param_elem is not None:
separator = append_param_elem.get("separator", ",")
first_separator = append_param_elem.get("first_separator", None)
join_str = append_param_elem.get("join", "=")
append_dict = {}
for value_elem in append_param_elem.findall("value"):
value_name = value_elem.get("name")
value_missing = value_elem.get("missing")
if None not in [value_name, value_missing]:
append_dict[value_name] = value_missing
append_param = Bunch(
separator=separator, first_separator=first_separator, join_str=join_str, append_dict=append_dict
)
rval.param_trans_dict[remote_name] = Bunch(
galaxy_name=galaxy_name, missing=missing, value_trans=value_trans, append_param=append_param
)
return rval
def __init__(self):
self.param_trans_dict = {}
def METHOD_NAME(self, params):
"""
update params in-place
"""
for remote_name, translator in self.param_trans_dict.items():
galaxy_name = (
translator.galaxy_name
) # NB: if a param by name galaxy_name is provided, it is always thrown away unless galaxy_name == remote_name
value = params.get(
remote_name, translator.missing
) # get value from input params, or use default value specified in tool config
if translator.value_trans and value in translator.value_trans:
value = translator.value_trans[value]
if translator.append_param:
for param_name, missing_value in translator.append_param.append_dict.items():
param_value = params.get(param_name, missing_value)
if translator.append_param.first_separator and translator.append_param.first_separator not in value:
sep = translator.append_param.first_separator
else:
sep = translator.append_param.separator
value += f"{sep}{param_name}{translator.append_param.join_str}{param_value}"
params.update({galaxy_name: value}) | null |
268 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import json
import os
import re
import shutil
import subprocess
import structlog
import yaml
from taskcluster.helper import TaskclusterConfig
taskcluster = TaskclusterConfig("https://community-tc.services.mozilla.com")
logger = structlog.getLogger(__name__)
ROOT = os.path.realpath(os.path.dirname(__file__))
def configure():
"""
Load configuration from CLI args and Taskcluster secrets
"""
parser = argparse.ArgumentParser(description="Run code-review integration tests")
parser.add_argument(
"-c",
"--configuration",
help="Local configuration file replacing Taskcluster secrets",
type=open,
)
parser.add_argument(
"--clone-dir",
help="Directory where to clone repositories",
default=os.environ.get("CLONE_DIR", os.path.join(ROOT, "clone")),
)
parser.add_argument(
"--taskcluster-secret",
help="Taskcluster Secret path",
default=os.environ.get("TASKCLUSTER_SECRET"),
)
args = parser.parse_args()
taskcluster.auth()
taskcluster.load_secrets(
args.taskcluster_secret,
required=("phabricator", "admins"),
existing={"admins": ["[email protected]"]},
local_secrets=yaml.safe_load(args.configuration)
if args.configuration
else None,
)
# Make sure the clone dir is available
os.makedirs(args.clone_dir, exist_ok=True)
# Check the url is correctly formatted
assert taskcluster.secrets["phabricator"]["url"].endswith(
"/api/"
), "Phabricator url must end in /api/"
return args
def METHOD_NAME(url, directory, branch="tip"):
"""
Mercurial clone with robustcheckout
"""
logger.info("Cloning repository", url=url, dir=directory)
# The parent directory must exist; the clone target itself need not
assert os.path.exists(os.path.dirname(directory)), "Missing parent of clone dir"
# Cleanup existing target
if os.path.exists(directory):
logger.info("Removing previous clone")
shutil.rmtree(directory)
# Now let's clone
cmd = [
"hg",
"robustcheckout",
"--purge",
f"--sharebase={directory}-shared",
f"--branch={branch}",
url,
directory,
]
subprocess.check_output(cmd)
def tip(repo_dir):
"""
Get the tip of the repo
"""
cmd = ["hg", "tip", "--template={rev}"]
rev = subprocess.check_output(cmd, cwd=repo_dir)
return int(rev)
def patch(filename, repo_dir, message):
"""
Apply a locally stored patch on the repository
and commit the difference
"""
assert os.path.isdir(repo_dir), f"Not a directory {repo_dir}"
path = os.path.join(ROOT, "patches", filename)
assert os.path.exists(path), f"Missing patch {path}"
logger.info("Applying patch", name=filename, dir=repo_dir)
cmd = [
"hg",
"import",
"--user=code-review-integration",
f"--message={message}",
path,
]
subprocess.check_output(cmd, cwd=repo_dir)
# Load revision created
rev = tip(repo_dir)
logger.info("Committed a new revision", id=rev)
return rev
def publish(repo_dir, repo_callsign, revision):
"""
Publish diff on Phabricator
from the base of the repository
"""
def _dump(path, payload):
if os.path.exists(path):
logger.info("Skip overriding arc config", path=path)
return
with open(path, "w") as f:
json.dump(payload, f, indent=4, sort_keys=True)
logger.info("Setup arc configuration", path=path)
# Write arcrc config files
phab_url = taskcluster.secrets["phabricator"]["url"]
base_url = phab_url.replace("/api/", "/")
phab_token = taskcluster.secrets["phabricator"]["token"]
_dump(os.path.expanduser("~/.arcrc"), {"hosts": {phab_url: {"token": phab_token}}})
_dump(
os.path.join(repo_dir, ".hg", ".arcconfig"),
{"repository.callsign": repo_callsign, "phabricator.uri": base_url},
)
logger.info(
"Publishing a revision on phabricator", url=phab_url, local_revision=revision
)
cmd = ["moz-phab", "submit", "--yes", "--no-lint", "--no-bug", f"{revision}"]
output = subprocess.check_output(cmd, cwd=repo_dir)
# Parse output to get the revision url on the last line
last_line = output.splitlines()[-1]
match = re.search(rf"^-> ({base_url}D\d+)$", last_line.decode("utf-8"))
assert match is not None, f"No revision found in moz-phab output:\n{output}"
return match.group(1)
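# A hedged illustration of the parsing above: if the last line of moz-phab's
# output were b"-> https://phabricator.example.com/D12345" (URL invented for
# the example), match.group(1) would be that full revision URL. Note that
# base_url is interpolated into the pattern unescaped, so its dots match any
# character; harmless here, but worth knowing.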
def notify(message):
"""
Notify admins through email
"""
notify = taskcluster.get_service("notify")
for email in taskcluster.secrets["admins"]:
logger.info("Sending email", to=email)
notify.email(
{
"address": email,
"subject": "Code review integration test",
"content": message,
}
)
if __name__ == "__main__":
logger.info("Running integration test")
args = configure()
    # Clone NSS, which is much faster to clone than mozilla-central
nss = os.path.join(args.clone_dir, "nss")
METHOD_NAME("https://hg.mozilla.org/projects/nss", nss)
base = tip(nss)
# Apply a specific patch on the NSS clone
revision = patch("nss.diff", nss, "Bug XXYYZZ - Code review integration test")
# Submit commit on Phabricator instance
url = publish(nss, "NSS", revision)
# Send notification to admins
notify(f"New code-review integration test: {url}")
logger.info("All done !") | null |
269 | ##########################################################################
#
# Copyright (c) 2008, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import maya.cmds
import maya.mel
## A base class to help in creating custom attribute editor controls
# in a nice object oriented manner. After deriving from this class you
# can instantiate a control from an attribute editor template using mel
# of the following form :
#
# ieAttributeEditorControl( "DerivedClassName", "attributeName" )
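#
# A hedged sketch of a derived control (names are illustrative, not part of
# this module) :
#
#	class MyControl( AttributeEditorControl ) :
#
#		def __init__( self, attribute ) :
#			AttributeEditorControl.__init__( self, attribute )
#			# build the ui here, under the current maya parent
#
#		def replace( self, attribute ) :
#			AttributeEditorControl.replace( self, attribute )
#			# reattach the ui to the new attribute here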
class AttributeEditorControl :
## Derived classes should first call the base class __init__, before
# building their ui.
def __init__( self, attribute ) :
self.__nodeName = attribute.split( "." )[0]
self.__attributeName = attribute
## Derived classes should first call the base class replace, before
# reattaching their ui to the new attribute.
def replace( self, attribute ) :
self.__nodeName = attribute.split( "." )[0]
self.__attributeName = attribute
## Returns the name of the node this ui is used for.
def METHOD_NAME( self ) :
return self.__nodeName
## Returns the name of the attribute this ui is used for.
def attributeName( self ) :
return self.__attributeName
@staticmethod
def _new( className, attribute ) :
# we smuggle the class name as a fake attribute name so we
# need to get it back out now.
className = ".".join( className.split( "." )[1:] )
# the class name might also be in a namespace that isn't imported
# in this scope. so import it.
if not "." in className :
cls = eval( className )
else :
names = className.split( "." )
namespace = __import__( ".".join( names[:-1] ) )
cls = getattr( namespace, names[-1] )
parent = maya.cmds.setParent( q=True )
control = cls( attribute )
maya.cmds.setParent( parent )
AttributeEditorControl.__instances[parent] = control
# Script jobs aren't available from maya.cmds. Maya Python bindings generate swig warnings
# such as "swig/python detected a memory leak of type 'MCallbackId *', no destructor found"
maya.mel.eval( 'scriptJob -protected -uiDeleted "%s" "python \\"IECoreMaya.AttributeEditorControl._uiDeleted( \'%s\' )\\""' % ( parent, parent ) )
@staticmethod
def _replace( attribute ) :
parent = maya.cmds.setParent( q=True )
control = AttributeEditorControl.__instances[parent]
control.replace( attribute )
@staticmethod
def _uiDeleted( parent ) :
del AttributeEditorControl.__instances[parent]
# Maps from parent ui names to AttributeEditorControl instances
__instances = {} | null |
270 | import numpy as np
from sklearn.base import clone
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.utils import check_random_state
from joblib import Parallel, delayed
def _parallel_fit(regressor, X, y):
return regressor.fit(X, y)
class GradientBoostingQuantileRegressor(BaseEstimator, RegressorMixin):
"""Predict several quantiles with one estimator.
This is a wrapper around `GradientBoostingRegressor`'s quantile
regression that allows you to predict several `quantiles` in
one go.
Parameters
----------
quantiles : array-like
Quantiles to predict. By default the 16, 50 and 84%
quantiles are predicted.
base_estimator : GradientBoostingRegressor instance or None (default)
Quantile regressor used to make predictions. Only instances
of `GradientBoostingRegressor` are supported. Use this to change
the hyper-parameters of the estimator.
n_jobs : int, default=1
The number of jobs to run in parallel for `fit`.
If -1, then the number of jobs is set to the number of cores.
random_state : int, RandomState instance, or None (default)
Set random state to something other than None for reproducible
results.
"""
def __init__(self, quantiles=[0.16, 0.5, 0.84], base_estimator=None,
n_jobs=1, random_state=None):
self.quantiles = quantiles
self.random_state = random_state
self.base_estimator = base_estimator
self.n_jobs = n_jobs
def fit(self, X, y):
"""Fit one regressor for each quantile.
Parameters
----------
X : array-like, shape=(n_samples, n_features)
Training vectors, where `n_samples` is the number of samples
and `n_features` is the number of features.
y : array-like, shape=(n_samples,)
Target values (real numbers in regression)
"""
rng = check_random_state(self.random_state)
if self.base_estimator is None:
base_estimator = GradientBoostingRegressor(loss='quantile')
else:
base_estimator = self.base_estimator
if not isinstance(base_estimator, GradientBoostingRegressor):
raise ValueError('base_estimator has to be of type'
' GradientBoostingRegressor.')
if not base_estimator.loss == 'quantile':
raise ValueError('base_estimator has to use quantile'
' loss not %s' % base_estimator.loss)
# The predictions for different quantiles should be sorted.
        # Therefore each of the regressors needs the same seed.
base_estimator.set_params(random_state=rng)
regressors = []
for q in self.quantiles:
regressor = clone(base_estimator)
regressor.set_params(alpha=q)
regressors.append(regressor)
self.regressors_ = Parallel(n_jobs=self.n_jobs, backend='threading')(
delayed(_parallel_fit)(regressor, X, y)
for regressor in regressors)
return self
def METHOD_NAME(self, X, return_std=False, return_quantiles=False):
"""Predict.
        Predict `X` at every quantile if `return_quantiles` is set to True.
        If `return_std` is set to True, return the median prediction
        and the predicted standard deviation, which is approximated as
        (the 0.84 quantile - the 0.16 quantile) divided by 2.0.
        Otherwise only the median (0.5 quantile) prediction is returned.
Parameters
----------
X : array-like, shape=(n_samples, n_features)
where `n_samples` is the number of samples
and `n_features` is the number of features.
"""
predicted_quantiles = np.asarray(
[rgr.METHOD_NAME(X) for rgr in self.regressors_])
if return_quantiles:
return predicted_quantiles.T
elif return_std:
std_quantiles = [0.16, 0.5, 0.84]
is_present_mask = np.in1d(std_quantiles, self.quantiles)
if not np.all(is_present_mask):
raise ValueError(
"return_std works only if the quantiles during "
"instantiation include 0.16, 0.5 and 0.84")
low = self.regressors_[self.quantiles.index(0.16)].METHOD_NAME(X)
high = self.regressors_[self.quantiles.index(0.84)].METHOD_NAME(X)
mean = self.regressors_[self.quantiles.index(0.5)].METHOD_NAME(X)
return mean, ((high - low) / 2.0)
# return the mean
return self.regressors_[self.quantiles.index(0.5)].METHOD_NAME(X) | null |
271 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkga.endpoint import endpoint_data
class CreateEndpointGroupRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ga', '2019-11-20', 'CreateEndpointGroup','gaplus')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_PortOverridess(self): # RepeatList
return self.get_query_params().get('PortOverrides')
def set_PortOverridess(self, PortOverrides): # RepeatList
for depth1 in range(len(PortOverrides)):
if PortOverrides[depth1].get('ListenerPort') is not None:
self.add_query_param('PortOverrides.' + str(depth1 + 1) + '.ListenerPort', PortOverrides[depth1].get('ListenerPort'))
if PortOverrides[depth1].get('EndpointPort') is not None:
self.add_query_param('PortOverrides.' + str(depth1 + 1) + '.EndpointPort', PortOverrides[depth1].get('EndpointPort'))
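    # A hedged illustration of the flattening above:
    # set_PortOverridess([{'ListenerPort': 80, 'EndpointPort': 8080}]) adds the
    # 1-indexed query params 'PortOverrides.1.ListenerPort' = 80 and
    # 'PortOverrides.1.EndpointPort' = 8080.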
def get_HealthCheckEnabled(self): # Boolean
return self.get_query_params().get('HealthCheckEnabled')
def set_HealthCheckEnabled(self, HealthCheckEnabled): # Boolean
self.add_query_param('HealthCheckEnabled', HealthCheckEnabled)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_HealthCheckIntervalSeconds(self): # Integer
return self.get_query_params().get('HealthCheckIntervalSeconds')
def set_HealthCheckIntervalSeconds(self, HealthCheckIntervalSeconds): # Integer
self.add_query_param('HealthCheckIntervalSeconds', HealthCheckIntervalSeconds)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_HealthCheckProtocol(self): # String
return self.get_query_params().get('HealthCheckProtocol')
def set_HealthCheckProtocol(self, HealthCheckProtocol): # String
self.add_query_param('HealthCheckProtocol', HealthCheckProtocol)
def get_EndpointRequestProtocol(self): # String
return self.get_query_params().get('EndpointRequestProtocol')
def set_EndpointRequestProtocol(self, EndpointRequestProtocol): # String
self.add_query_param('EndpointRequestProtocol', EndpointRequestProtocol)
def get_ListenerId(self): # String
return self.get_query_params().get('ListenerId')
def set_ListenerId(self, ListenerId): # String
self.add_query_param('ListenerId', ListenerId)
def get_HealthCheckPath(self): # String
return self.get_query_params().get('HealthCheckPath')
def set_HealthCheckPath(self, HealthCheckPath): # String
self.add_query_param('HealthCheckPath', HealthCheckPath)
def get_EndpointConfigurationss(self): # RepeatList
return self.get_query_params().get('EndpointConfigurations')
def set_EndpointConfigurationss(self, EndpointConfigurations): # RepeatList
for depth1 in range(len(EndpointConfigurations)):
if EndpointConfigurations[depth1].get('Type') is not None:
self.add_query_param('EndpointConfigurations.' + str(depth1 + 1) + '.Type', EndpointConfigurations[depth1].get('Type'))
if EndpointConfigurations[depth1].get('EnableClientIPPreservation') is not None:
self.add_query_param('EndpointConfigurations.' + str(depth1 + 1) + '.EnableClientIPPreservation', EndpointConfigurations[depth1].get('EnableClientIPPreservation'))
if EndpointConfigurations[depth1].get('Weight') is not None:
self.add_query_param('EndpointConfigurations.' + str(depth1 + 1) + '.Weight', EndpointConfigurations[depth1].get('Weight'))
if EndpointConfigurations[depth1].get('EnableProxyProtocol') is not None:
self.add_query_param('EndpointConfigurations.' + str(depth1 + 1) + '.EnableProxyProtocol', EndpointConfigurations[depth1].get('EnableProxyProtocol'))
if EndpointConfigurations[depth1].get('Endpoint') is not None:
self.add_query_param('EndpointConfigurations.' + str(depth1 + 1) + '.Endpoint', EndpointConfigurations[depth1].get('Endpoint'))
def get_EndpointGroupType(self): # String
return self.get_query_params().get('EndpointGroupType')
def METHOD_NAME(self, EndpointGroupType): # String
self.add_query_param('EndpointGroupType', EndpointGroupType)
def get_AcceleratorId(self): # String
return self.get_query_params().get('AcceleratorId')
def set_AcceleratorId(self, AcceleratorId): # String
self.add_query_param('AcceleratorId', AcceleratorId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_TrafficPercentage(self): # Integer
return self.get_query_params().get('TrafficPercentage')
def set_TrafficPercentage(self, TrafficPercentage): # Integer
self.add_query_param('TrafficPercentage', TrafficPercentage)
def get_HealthCheckPort(self): # Integer
return self.get_query_params().get('HealthCheckPort')
def set_HealthCheckPort(self, HealthCheckPort): # Integer
self.add_query_param('HealthCheckPort', HealthCheckPort)
def get_ThresholdCount(self): # Integer
return self.get_query_params().get('ThresholdCount')
def set_ThresholdCount(self, ThresholdCount): # Integer
self.add_query_param('ThresholdCount', ThresholdCount)
def get_EndpointGroupRegion(self): # String
return self.get_query_params().get('EndpointGroupRegion')
def set_EndpointGroupRegion(self, EndpointGroupRegion): # String
self.add_query_param('EndpointGroupRegion', EndpointGroupRegion)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name) | null |
272 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdknlb.endpoint import endpoint_data
class ListLoadBalancersRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Nlb', '2022-04-30', 'ListLoadBalancers','nlb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_LoadBalancerNamess(self): # RepeatList
return self.get_query_params().get('LoadBalancerNames')
def set_LoadBalancerNamess(self, LoadBalancerNames): # RepeatList
for depth1 in range(len(LoadBalancerNames)):
self.add_query_param('LoadBalancerNames.' + str(depth1 + 1), LoadBalancerNames[depth1])
def get_LoadBalancerIdss(self): # RepeatList
return self.get_query_params().get('LoadBalancerIds')
def set_LoadBalancerIdss(self, LoadBalancerIds): # RepeatList
for depth1 in range(len(LoadBalancerIds)):
self.add_query_param('LoadBalancerIds.' + str(depth1 + 1), LoadBalancerIds[depth1])
def METHOD_NAME(self): # String
return self.get_query_params().get('AddressIpVersion')
def set_AddressIpVersion(self, AddressIpVersion): # String
self.add_query_param('AddressIpVersion', AddressIpVersion)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_DNSName(self): # String
return self.get_query_params().get('DNSName')
def set_DNSName(self, DNSName): # String
self.add_query_param('DNSName', DNSName)
def get_AddressType(self): # String
return self.get_query_params().get('AddressType')
def set_AddressType(self, AddressType): # String
self.add_query_param('AddressType', AddressType)
def get_Tag(self): # Array
return self.get_query_params().get('Tag')
def set_Tag(self, Tag): # Array
for index1, value1 in enumerate(Tag):
if value1.get('Key') is not None:
self.add_query_param('Tag.' + str(index1 + 1) + '.Key', value1.get('Key'))
if value1.get('Value') is not None:
self.add_query_param('Tag.' + str(index1 + 1) + '.Value', value1.get('Value'))
def get_VpcIdss(self): # RepeatList
return self.get_query_params().get('VpcIds')
def set_VpcIdss(self, VpcIds): # RepeatList
for depth1 in range(len(VpcIds)):
self.add_query_param('VpcIds.' + str(depth1 + 1), VpcIds[depth1])
def get_LoadBalancerBusinessStatus(self): # String
return self.get_query_params().get('LoadBalancerBusinessStatus')
def set_LoadBalancerBusinessStatus(self, LoadBalancerBusinessStatus): # String
self.add_query_param('LoadBalancerBusinessStatus', LoadBalancerBusinessStatus)
def get_LoadBalancerStatus(self): # String
return self.get_query_params().get('LoadBalancerStatus')
def set_LoadBalancerStatus(self, LoadBalancerStatus): # String
self.add_query_param('LoadBalancerStatus', LoadBalancerStatus)
def get_LoadBalancerType(self): # String
return self.get_query_params().get('LoadBalancerType')
def set_LoadBalancerType(self, LoadBalancerType): # String
self.add_query_param('LoadBalancerType', LoadBalancerType)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_MaxResults(self): # Integer
return self.get_query_params().get('MaxResults')
def set_MaxResults(self, MaxResults): # Integer
self.add_query_param('MaxResults', MaxResults)
def get_Ipv6AddressType(self): # String
return self.get_query_params().get('Ipv6AddressType')
def set_Ipv6AddressType(self, Ipv6AddressType): # String
self.add_query_param('Ipv6AddressType', Ipv6AddressType) | null |
273 | import pytest
from django.test import RequestFactory
from osf.models import RegistrationSchema
from admin_tests.utilities import setup_view
from admin.registration_schemas import views
from django.contrib.messages.storage.fallback import FallbackStorage
from django.core.files.uploadedfile import SimpleUploadedFile
from osf_tests.factories import RegistrationProviderFactory, RegistrationFactory
@pytest.mark.django_db
class TestRegistrationSchemaList:
@pytest.fixture()
def req(self):
req = RequestFactory().get('/fake_path')
        # django.contrib.messages has a bug which affects unit tests
# more info here -> https://code.djangoproject.com/ticket/17971
setattr(req, 'session', 'session')
messages = FallbackStorage(req)
setattr(req, '_messages', messages)
return req
@pytest.fixture()
def registration_schema(self):
return RegistrationSchema.objects.create(
name='foo',
schema={'foo': 42, 'atomicSchema': True},
schema_version=1,
active=False,
visible=False
)
@pytest.fixture()
def view(self, req, registration_schema):
view = views.RegistrationSchemaListView()
view.kwargs = {'registration_schema_id': registration_schema.id}
return setup_view(view, req)
def test_registration_schema_list(self, view, registration_schema, req):
data = view.get_context_data()
assert any(item.id == registration_schema.id for item in data['registration_schemas'])
@pytest.mark.django_db
@pytest.mark.urls('admin.base.urls')
class TestRegistrationSchemaDetail:
@pytest.fixture()
def req(self):
req = RequestFactory().get('/fake_path')
        # django.contrib.messages has a bug which affects unit tests
# more info here -> https://code.djangoproject.com/ticket/17971
setattr(req, 'session', 'session')
messages = FallbackStorage(req)
setattr(req, '_messages', messages)
return req
@pytest.fixture()
def registration_schema(self):
return RegistrationSchema.objects.create(
name='foo',
schema={'foo': 42, 'atomicSchema': True},
schema_version=1,
active=False,
visible=False
)
@pytest.fixture()
def view(self, req, registration_schema):
plain_view = views.RegistrationSchemaDetailView()
view = setup_view(plain_view, req)
view.kwargs = {'registration_schema_id': registration_schema.id}
return view
def test_registration_schema_detail(self, view, registration_schema):
registration_schema.visible = True
registration_schema.active = True
registration_schema.save()
context = view.get_context_data()
assert context['registration_schema'] == registration_schema
assert context['form'].data['active'] == registration_schema.active
assert context['form'].data['visible'] == registration_schema.visible
def METHOD_NAME(self, view, registration_schema):
assert not registration_schema.visible
assert not registration_schema.active
form = view.get_form()
        # `['on']` indicates a selected toggle in this form
form.data['active'] = ['on']
form.data['visible'] = ['on']
view.form_valid(form)
registration_schema.refresh_from_db()
assert registration_schema.visible
assert registration_schema.active
@pytest.mark.django_db
@pytest.mark.urls('admin.base.urls')
class TestCreateRegistrationSchema:
@pytest.fixture()
def req(self):
req = RequestFactory().get('/fake_path')
        # django.contrib.messages has a bug which affects unit tests
# more info here -> https://code.djangoproject.com/ticket/17971
setattr(req, 'session', 'session')
messages = FallbackStorage(req)
setattr(req, '_messages', messages)
return req
@pytest.fixture
def csv_data(self):
return b'block_type,display_text,help_text,example_text,required,registration_response_key,NOEX_updates,' \
b'NOEX_update_reason\npage-heading,This is the page heading,"This is extra, helpful context",,FALSE,,' \
b'FALSE,'
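    # For readability, the CSV above has columns block_type, display_text,
    # help_text, example_text, required, registration_response_key,
    # NOEX_updates, NOEX_update_reason, and a single 'page-heading' row whose
    # help_text is the quoted "This is extra, helpful context".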
@pytest.fixture
def csv_file(self, csv_data):
return SimpleUploadedFile('test_file.csv', csv_data, content_type='application/csv')
@pytest.fixture()
def view(self, req):
plain_view = views.RegistrationSchemaCreateView()
view = setup_view(plain_view, req)
return view
@pytest.fixture()
def form(self, view, csv_file):
form = view.get_form()
form.data['name'] = 'Trust the Process'
form.files['schema'] = csv_file
return form
def test_registration_schema_create(self, view, csv_file, form, req):
view.form_valid(form)
registration_schema = RegistrationSchema.objects.get(name=form.data['name'])
assert registration_schema.schema_blocks.count() == 1
block = registration_schema.schema_blocks.first()
assert block.block_type == 'page-heading'
assert block.display_text == 'This is the page heading'
assert registration_schema.schema_version == 1
def test_registration_schema_increment_version(self, view, csv_file, form, req):
view.form_valid(form)
registration_schema = RegistrationSchema.objects.get_latest_version(name=form.data['name'])
assert registration_schema.schema_version == 1
view.form_valid(form)
registration_schema = RegistrationSchema.objects.get_latest_version(name=form.data['name'])
assert registration_schema.schema_version == 2
def test_registration_schema_csv_to_blocks(self, view, csv_file):
blocks = view.csv_to_blocks(csv_file)
assert len(blocks) == 1
assert blocks[0]['block_type'] == 'page-heading'
assert blocks[0]['display_text'] == 'This is the page heading'
@pytest.mark.django_db
@pytest.mark.urls('admin.base.urls')
class TestDeleteRegistrationSchema:
@pytest.fixture()
def req(self):
req = RequestFactory().get('/fake_path')
        # django.contrib.messages has a bug which affects unit tests
# more info here -> https://code.djangoproject.com/ticket/17971
setattr(req, 'session', 'session')
messages = FallbackStorage(req)
setattr(req, '_messages', messages)
return req
@pytest.fixture()
def registration_schema(self):
return RegistrationSchema.objects.create(
name='foo',
schema={'foo': 42, 'atomicSchema': True},
schema_version=1,
active=False,
visible=False
)
@pytest.fixture()
def registration(self, registration_schema):
registration = RegistrationFactory()
registration.registered_schema.add(registration_schema)
registration.save()
return registration
@pytest.fixture()
def provider(self, registration_schema):
provider = RegistrationProviderFactory()
registration_schema.providers.add(provider)
return provider
@pytest.fixture()
def view(self, req, registration_schema):
view = views.RegistrationSchemaDeleteView()
view = setup_view(view, req)
view.kwargs = {'registration_schema_id': registration_schema.id}
return view
def test_registration_schema_delete(self, req, view, registration_schema):
view.delete(req)
assert not RegistrationSchema.objects.filter(id=registration_schema.id)
def test_registration_schema_prevent_delete_if_used(self, req, view, registration_schema, provider, registration):
"""
If a Registration Schema is being used as part of registration it shouldn't be deletable from the admin app.
"""
view.delete(req)
assert RegistrationSchema.objects.filter(id=registration_schema.id) | null |
274 | """
This type stub file was generated by pyright.
"""
from django.contrib import admin
from django.utils.decorators import method_decorator
from django.views.decorators.http import require_POST
from .mixins import BaseExportMixin, BaseImportMixin
class ImportExportMixinBase:
def get_model_info(self): ...
class ImportMixin(BaseImportMixin, ImportExportMixinBase):
"""
Import mixin.
This is intended to be mixed with django.contrib.admin.ModelAdmin
https://docs.djangoproject.com/en/dev/ref/contrib/admin/
"""
change_list_template = ...
import_template_name = ...
from_encoding = ...
skip_admin_log = ...
tmp_storage_class = ...
def get_skip_admin_log(self): ...
def METHOD_NAME(self): ...
def has_import_permission(self, request): # -> Literal[True]:
"""
Returns whether a request has import permission.
"""
...
def get_urls(self): ...
@method_decorator(require_POST)
def process_import(self, request, *args, **kwargs): # -> HttpResponseRedirect | None:
"""
Perform the actual import action (after the user has confirmed the import)
"""
...
def process_dataset(self, dataset, confirm_form, request, *args, **kwargs): ...
def process_result(self, result, request): ...
def generate_log_entries(self, result, request): ...
def add_success_message(self, result, request): ...
def get_import_context_data(self, **kwargs): ...
def get_context_data(self, **kwargs): ...
def get_import_form(self): # -> Type[ImportForm]:
"""
Get the form type used to read the import format and file.
"""
...
def get_confirm_import_form(self): # -> Type[ConfirmImportForm]:
"""
Get the form type (class) used to confirm the import.
"""
...
def get_form_kwargs(self, form, *args, **kwargs): # -> dict[str, Unknown]:
"""
Prepare/returns kwargs for the import form.
To distinguish between import and confirm import forms,
the following approach may be used:
if isinstance(form, ImportForm):
# your code here for the import form kwargs
# e.g. update.kwargs({...})
elif isinstance(form, ConfirmImportForm):
# your code here for the confirm import form kwargs
# e.g. update.kwargs({...})
...
"""
...
def get_import_data_kwargs(self, request, *args, **kwargs): # -> dict[str, Unknown]:
"""
Prepare kwargs for import_data.
"""
...
def write_to_tmp_storage(self, import_file, input_format): ...
def import_action(
self, request, *args, **kwargs
): # -> HttpResponse | TemplateResponse:
"""
Perform a dry_run of the import to make sure the import will not
        result in errors. If there were no errors, save the user
uploaded file to a local temp file that will be used by
'process_import' for the actual import.
"""
...
def changelist_view(self, request, extra_context=...): ...
class ExportMixin(BaseExportMixin, ImportExportMixinBase):
"""
Export mixin.
This is intended to be mixed with django.contrib.admin.ModelAdmin
https://docs.djangoproject.com/en/dev/ref/contrib/admin/
"""
change_list_template = ...
export_template_name = ...
to_encoding = ...
def get_urls(self): ...
def has_export_permission(self, request): # -> Literal[True]:
"""
Returns whether a request has export permission.
"""
...
def get_export_queryset(self, request):
"""
Returns export queryset.
Default implementation respects applied search and filters.
"""
...
def get_export_data(self, file_format, queryset, *args, **kwargs):
"""
Returns file_format representation for given queryset.
"""
...
def get_export_context_data(self, **kwargs): ...
def get_context_data(self, **kwargs): ...
def get_export_form(self): # -> Type[ExportForm]:
"""
Get the form type used to read the export format.
"""
...
def export_action(self, request, *args, **kwargs): ...
def changelist_view(self, request, extra_context=...): ...
def get_export_filename(self, request, queryset, file_format): ...
class ImportExportMixin(ImportMixin, ExportMixin):
"""
Import and export mixin.
"""
change_list_template = ...
class ImportExportModelAdmin(ImportExportMixin, admin.ModelAdmin):
"""
Subclass of ModelAdmin with import/export functionality.
"""
...
class ExportActionMixin(ExportMixin):
"""
Mixin with export functionality implemented as an admin action.
"""
change_list_template = ...
def __init__(self, *args, **kwargs) -> None:
"""
Adds a custom action form initialized with the available export
formats.
"""
...
def export_admin_action(self, request, queryset): # -> HttpResponse | None:
"""
Exports the selected rows using file_format.
"""
...
def get_actions(self, request):
"""
Adds the export action to the list of available actions.
"""
...
@property
def media(self): ...
class ExportActionModelAdmin(ExportActionMixin, admin.ModelAdmin):
"""
Subclass of ModelAdmin with export functionality implemented as an
admin action.
"""
...
class ImportExportActionModelAdmin(ImportMixin, ExportActionModelAdmin):
"""
Subclass of ExportActionModelAdmin with import/export functionality.
Export functionality is implemented as an admin action.
"""
... | null |
275 | # Copyright 2022 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gym
import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF
import nnabla.solvers as S
import nnabla_rl.hooks as H
from nnabla_rl.algorithms import DQN, DQNConfig
from nnabla_rl.builders import ModelBuilder, SolverBuilder, ReplayBufferBuilder
from nnabla_rl.environments.wrappers import ScreenRenderEnv, NumpyFloat32Env
from nnabla_rl.models import DiscreteQFunction
from nnabla_rl.replay_buffer import ReplayBuffer
from nnabla_rl.replay_buffers import MemoryEfficientAtariBuffer
from nnabla_rl.utils.reproductions import build_atari_env # noqa
from nnabla_rl.utils.evaluator import EpisodicEvaluator
from nnabla_rl.writers import FileWriter
def build_classic_control_env(env_name, render=False):
env = gym.make(env_name)
env = NumpyFloat32Env(env)
if render:
# render environment if render is True
env = ScreenRenderEnv(env)
return env
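# A hedged illustration: build_classic_control_env('CartPole-v1') returns a
# gym.Env whose observations are converted to float32 (NumpyFloat32Env); with
# render=True each step is additionally drawn on screen via ScreenRenderEnv.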
class ExampleClassicControlQFunction(DiscreteQFunction):
def __init__(self, scope_name: str, n_action: int):
super(ExampleClassicControlQFunction, self).__init__(scope_name)
self._n_action = n_action
def all_q(self, s: nn.Variable) -> nn.Variable:
with nn.parameter_scope(self.scope_name):
with nn.parameter_scope("affine1"):
h = PF.affine(s, n_outmaps=100)
h = F.relu(h)
with nn.parameter_scope("affine2"):
h = PF.affine(h, n_outmaps=100)
h = F.relu(h)
with nn.parameter_scope("affine3"):
h = PF.affine(h, n_outmaps=100)
h = F.relu(h)
with nn.parameter_scope("affine4"):
h = PF.affine(h, n_outmaps=self._n_action)
return h
class ExampleAtariQFunction(DiscreteQFunction):
def __init__(self, scope_name: str, n_action: int):
super(ExampleAtariQFunction, self).__init__(scope_name)
self._n_action = n_action
def all_q(self, s: nn.Variable) -> nn.Variable:
with nn.parameter_scope(self.scope_name):
with nn.parameter_scope("conv1"):
h = PF.convolution(s, 32, (8, 8), stride=(4, 4))
h = F.relu(h)
with nn.parameter_scope("conv2"):
h = PF.convolution(h, 64, (4, 4), stride=(2, 2))
h = F.relu(h)
with nn.parameter_scope("conv3"):
h = PF.convolution(h, 64, (3, 3), stride=(1, 1))
h = F.relu(h)
h = F.reshape(h, (-1, 3136))
with nn.parameter_scope("affine1"):
h = PF.affine(h, 512)
h = F.relu(h)
with nn.parameter_scope("affine2"):
h = PF.affine(h, self._n_action)
return h
class ExampleQFunctionBuilder(ModelBuilder):
def __init__(self, is_atari=False):
self._is_atari = is_atari
def METHOD_NAME(self, scope_name, env_info, algorithm_config, **kwargs):
if self._is_atari:
return ExampleAtariQFunction(scope_name, env_info.action_dim)
else:
return ExampleClassicControlQFunction(scope_name, env_info.action_dim)
class ExampleQSolverBuilder(SolverBuilder):
def build_solver(self, env_info, algorithm_config, **kwargs):
config: DQNConfig = algorithm_config
solver = S.Adam(alpha=config.learning_rate)
return solver
class ExampleReplayBufferBuilder(ReplayBufferBuilder):
def __init__(self, is_atari=False):
self._is_atari = is_atari
def build_replay_buffer(self, env_info, algorithm_config, **kwargs):
config: DQNConfig = algorithm_config
if self._is_atari:
return MemoryEfficientAtariBuffer(capacity=config.replay_buffer_size)
else:
return ReplayBuffer(capacity=config.replay_buffer_size)
def train():
    # nnabla-rl's reinforcement learning algorithms require an environment that implements the gym.Env interface
# for the details of gym.Env see: https://github.com/openai/gym
env_name = 'CartPole-v1'
train_env = build_classic_control_env(env_name)
# evaluation env is used only for running the evaluation of models during the training.
# if you do not evaluate the model during the training, this environment is not necessary.
eval_env = build_classic_control_env(env_name, render=True)
is_atari = False
start_timesteps = 5000
max_explore_steps = 10000
evaluation_timing = 10000
total_iterations = 100000
# If you want to train on atari games, uncomment below
# You can change the name of environment to change the game to train.
# For the list of available games see: https://gym.openai.com/envs/#atari
    # Your machine must have more than 20GB of memory to run the training.
# Adjust the replay_buffer_size through DQNConfig if you do not have enough memory on your machine.
# env_name = 'BreakoutNoFrameskip-v4'
# train_env = build_atari_env(env_name)
# eval_env = build_atari_env(env_name, test=True, render=True)
# is_atari = True
# start_timesteps = 50000
# max_explore_steps = 1000000
# evaluation_timing = 250000
# total_iterations = 50000000
# Will output evaluation results and model snapshots to the outdir
outdir = f'{env_name}_results'
# Writer will save the evaluation results to file.
# If you set writer=None, evaluator will only print the evaluation results on terminal.
writer = FileWriter(outdir, "evaluation_result")
evaluator = EpisodicEvaluator(run_per_evaluation=5)
    # evaluate the trained model with eval_env every evaluation_timing
    # iterations (10000 here; 250000 is used for atari games).
evaluation_hook = H.EvaluationHook(
eval_env, evaluator, timing=evaluation_timing, writer=writer)
# This will print the iteration number every 100 iteration.
# Printing iteration number is convenient for checking the training progress.
# You can change this number to any number of your choice.
iteration_num_hook = H.IterationNumHook(timing=100)
    # save the trained model every evaluation_timing iterations
    # (10000 here; 250000 is used for atari games).
save_snapshot_hook = H.SaveSnapshotHook(outdir, timing=evaluation_timing)
# Set gpu_id to -1 to train on cpu.
gpu_id = 0
config = DQNConfig(gpu_id=gpu_id, learning_rate=1e-4,
start_timesteps=start_timesteps, max_explore_steps=max_explore_steps)
dqn = DQN(train_env,
config=config,
q_func_builder=ExampleQFunctionBuilder(is_atari=is_atari),
q_solver_builder=ExampleQSolverBuilder(),
replay_buffer_builder=ExampleReplayBufferBuilder(is_atari=is_atari))
    # Set instantiated hooks to periodically run additional jobs
dqn.set_hooks(
hooks=[evaluation_hook, iteration_num_hook, save_snapshot_hook])
dqn.train(train_env, total_iterations=total_iterations)
if __name__ == '__main__':
train() | null |
276 | # Copyright 2017,2018,2019,2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def check_arch_or_die(arch):
# See available archs
import sys
from models.registry import get_available_archs
archs = get_available_archs()
if arch in archs:
return
    print('Available architectures (specify with -a option):')
for an in archs:
print('*', an)
sys.exit(1)
def METHOD_NAME(value):
if not isinstance(value, str):
value = str(value)
return value.lower()
def parse_tuple(x):
return tuple(map(int, x.split(',')))
def add_runtime_args(parser):
parser.add_argument("--device-id", "-d", type=str, default='0',
                        help='Device ID the training runs on. This is only valid if you specify `-c cudnn`.')
parser.add_argument("--type-config", "-t", type=str, default='float',
help='Type configuration.')
parser.add_argument('--context', '-c', type=str,
default=None, help="Extension module. 'cudnn' is highly.recommended.")
def add_arch_args(parser):
parser.add_argument('--arch', '-a', type=METHOD_NAME,
                        default='', help='Architecture type. Pass an empty string "" to list the available architectures.')
parser.add_argument('--num-classes', type=int, default=1000,
help='Number of categories of classification.')
def add_train_dataset_args(parser, train_dir='./', train_list="train_label"):
parser.add_argument("--train-dir", '-T', type=str, default=train_dir,
help='Directory containing training data.')
parser.add_argument("--train-list", type=str, default=train_list,
help='Training file list.')
def add_val_dataset_args(parser, val_dir='./', val_list="val_label"):
parser.add_argument("--val-dir", '-V', type=str, default=val_dir,
help='Directory containing validation data.')
parser.add_argument("--val-list", type=str, default=val_list,
help='Validation file list.')
def add_dataset_args(parser):
add_train_dataset_args(parser)
add_val_dataset_args(parser)
def add_training_args(parser):
parser.add_argument("--batch-size", "-b", type=int, default=128,
help='Batch size per worker. The default is 128.')
parser.add_argument("--epochs", "-e", type=int, default=None,
help='Number of epochs for training. It overwrites the config described by `--train-config`.')
parser.add_argument("--monitor-path", "-m",
type=str, default=None,
                        help='Path where monitoring logs are saved.')
parser.add_argument("--val-interval", "-v", type=int, default=10,
                        help='Evaluation on the validation dataset is performed every val-interval epochs.')
parser.add_argument("--model-save-interval", "-s", type=int, default=10,
help='The epoch interval of saving model parameters.')
parser.add_argument("--model-load-path", type=str, default=None,
help='Path to the model parameters to be loaded.')
parser.add_argument('--train-config', '-C', type=str, default='cfg/train_default.yaml',
help='A config file which describes optimization configuration such as default batch size, solver, number of epochs, and learning rate scheduling.')
parser.add_argument("--finetune", action='store_true',
help='Finetuning from a pre-trained parameter set by `--model-load-path`. The final linear layer will be replaced with a new fully connected layer for a new task which outputs the number of classes specified by `--num-classes`.')
def mb_to_b(mb):
return int(mb) * (1 << 20)
def add_dali_args(parser):
parser.add_argument("--dali-num-threads", type=int, default=4,
help="DALI's number of CPU threads.")
parser.add_argument('--dali-prefetch-queue', type=int,
default=2, help="DALI prefetch queue depth")
parser.add_argument('--dali-nvjpeg-memory-padding-mb', type=mb_to_b, default=64,
dest='dali_nvjpeg_memory_padding',
help="Memory padding value for nvJPEG (in MB)")
def post_process_spatial_size(args):
if isinstance(args.spatial_size, int):
args.spatial_size = (args.spatial_size, args.spatial_size)
elif len(args.spatial_size) == 1:
args.spatial_size = args.spatial_size * 2
def resize_by_ratio(size):
""" Resize the size according to the imagenet training ratio.
"""
resize = round(256 / 224 * size / 2) * 2
return resize
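# Worked examples of the rounding above: resize_by_ratio(224) == 256 (the
# conventional ImageNet resize/crop pair) and resize_by_ratio(128) == 146;
# halving, rounding, then doubling keeps the result even.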
def get_train_args():
"""
Get command line arguments.
Arguments set the default values of command line arguments.
"""
import argparse
import os
parser = argparse.ArgumentParser(
description='''ImageNet classification example.
''')
add_runtime_args(parser)
add_arch_args(parser)
parser.add_argument("--channel-last", action='store_true',
help='Use a model with NHWC layout.')
parser.add_argument("--spatial-size", type=int, default=224, nargs="+",
help='Spatial size.')
add_training_args(parser)
add_dataset_args(parser)
add_dali_args(parser)
args = parser.parse_args()
# Post process
post_process_spatial_size(args)
# Check arch is available
check_arch_or_die(args.arch)
if args.monitor_path is None:
import datetime
args.monitor_path = 'tmp.monitor.' + \
datetime.datetime.now().strftime('%Y%m%d%H%M%S')
from utils import read_yaml
train_config = read_yaml(args.train_config)
if args.epochs is not None:
train_config.epochs = args.epochs
return args, train_config | null |
277 | import re
from pathlib import Path
from click.testing import CliRunner, Result
from ggshield.__main__ import cli
from ggshield.core.errors import ExitCode
from tests.conftest import (
_IAC_MULTIPLE_VULNERABILITIES,
_IAC_NO_VULNERABILITIES,
_IAC_SINGLE_VULNERABILITY,
)
from tests.unit.conftest import assert_invoke_exited_with, assert_invoke_ok, my_vcr
@my_vcr.use_cassette("test_iac_scan_single_vulnerability")
def test_display_single_vulnerability(tmp_path, cli_fs_runner: CliRunner):
(tmp_path / "iac_file_single_vulnerability.tf").write_text(
_IAC_SINGLE_VULNERABILITY
)
result = cli_fs_runner.invoke(
cli,
[
"iac",
"scan",
"all",
str(tmp_path),
],
)
assert_iac_version_displayed(result)
assert_file_single_vulnerability_displayed(result)
assert_documentation_url_displayed(result)
@my_vcr.use_cassette("test_iac_scan_single_vulnerability")
def test_exit_zero_single_vulnerability(tmp_path, cli_fs_runner: CliRunner):
(tmp_path / "iac_file_single_vulnerability.tf").write_text(
_IAC_SINGLE_VULNERABILITY
)
result = cli_fs_runner.invoke(
cli,
[
"iac",
"scan",
"all",
"--exit-zero",
str(tmp_path),
],
)
assert_invoke_ok(result)
@my_vcr.use_cassette("test_iac_scan_multiple_vulnerabilities")
def test_display_multiple_vulnerabilities(tmp_path, cli_fs_runner: CliRunner):
(tmp_path / "iac_file_multiple_vulnerabilities.tf").write_text(
_IAC_MULTIPLE_VULNERABILITIES
)
result = cli_fs_runner.invoke(
cli,
[
"iac",
"scan",
"all",
str(tmp_path),
],
)
assert_iac_version_displayed(result)
assert_file_multiple_vulnerabilities_displayed(result)
METHOD_NAME(result)
@my_vcr.use_cassette("test_iac_scan_no_vulnerabilities")
def test_display_no_vulnerability(tmp_path, cli_fs_runner: CliRunner):
(tmp_path / "iac_file_no_vulnerabilities.tf").write_text(_IAC_NO_VULNERABILITIES)
result = cli_fs_runner.invoke(
cli,
[
"iac",
"scan",
"all",
str(tmp_path),
],
)
assert_iac_version_displayed(result)
assert "No incidents have been found" in result.stdout
METHOD_NAME(result)
assert_invoke_ok(result)
@my_vcr.use_cassette("test_iac_scan_multiple_files")
def test_display_multiple_files(cli_fs_runner: CliRunner):
Path("tmp/").mkdir(exist_ok=True)
Path("tmp/iac_file_single_vulnerability.tf").write_text(_IAC_SINGLE_VULNERABILITY)
Path("tmp/iac_file_multiple_vulnerabilities.tf").write_text(
_IAC_MULTIPLE_VULNERABILITIES
)
Path("tmp/iac_file_no_vulnerabilities.tf").write_text(_IAC_NO_VULNERABILITIES)
result = cli_fs_runner.invoke(
cli,
[
"iac",
"scan",
"all",
"tmp",
],
)
assert_iac_version_displayed(result)
assert_file_single_vulnerability_displayed(result)
assert_file_multiple_vulnerabilities_displayed(result)
METHOD_NAME(result)
def assert_iac_version_displayed(result: Result):
assert re.search(r"iac-engine-version: \d\.\d{1,3}\.\d", result.stdout)
def METHOD_NAME(result: Result):
assert "Error scanning. Results may be incomplete." not in result.stdout
def assert_documentation_url_displayed(result: Result):
base_doc_url = "https://docs.gitguardian.com/iac-scanning/policies/"
regex = r"\((GG_IAC_\d{4})\).+" + base_doc_url.replace(".", r"\.") + r"\1"
assert re.search(
regex,
result.stdout,
re.S,
)
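# A hedged illustration: the regex above matches output along the lines of
#   (GG_IAC_0001) ... https://docs.gitguardian.com/iac-scanning/policies/GG_IAC_0001
# (policy id invented for the example); the backreference \1 requires the same
# id in parentheses and at the end of the URL, and re.S lets ".+" span lines.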
def assert_file_single_vulnerability_displayed(result: Result):
assert "iac_file_single_vulnerability.tf: 1 incident detected" in result.stdout
assert set(re.findall(r"GG_IAC_\d{4}", result.stdout)) >= {
"GG_IAC_0001",
}
assert '2 | resource "aws_alb_listener" "bad_example" {' in result.stdout
assert_invoke_exited_with(result, ExitCode.SCAN_FOUND_PROBLEMS)
def assert_file_multiple_vulnerabilities_displayed(result: Result):
assert "iac_file_multiple_vulnerabilities.tf: 2 incidents detected" in result.stdout
assert set(re.findall(r"GG_IAC_\d{4}", result.stdout)) >= {
"GG_IAC_0002",
"GG_IAC_0003",
}
assert '2 | resource "aws_security_group" "bad_example" {' in result.stdout
assert '8 | resource "aws_security_group_rule" "bad_example" {' in result.stdout
assert_invoke_exited_with(result, ExitCode.SCAN_FOUND_PROBLEMS) | null |
278 | from typing import Any, Dict, List, Optional, Tuple
from boa3.internal.model.builtin.method.builtinmethod import IBuiltinMethod
from boa3.internal.model.type.primitive.ibytestringtype import IByteStringType
from boa3.internal.model.variable import Variable
from boa3.internal.neo.vm.opcode import OpcodeHelper
from boa3.internal.neo.vm.opcode.Opcode import Opcode
class IsDigitMethod(IBuiltinMethod):
def __init__(self, self_type: IByteStringType = None):
from boa3.internal.model.type.type import Type
if not isinstance(self_type, IByteStringType):
self_type = Type.bytes
identifier = 'isdigit'
args: Dict[str, Variable] = {'self': Variable(self_type)}
super().__init__(identifier, args, return_type=Type.bool)
@property
def _arg_self(self) -> Variable:
return self.args['self']
@property
def _opcode(self) -> List[Tuple[Opcode, bytes]]:
from boa3.internal.compiler.codegenerator import get_bytes_count
from boa3.internal.neo.vm.type.Integer import Integer
from boa3.internal.neo.vm.type.StackItem import StackItemType
number0 = Integer(ord('0')).to_byte_array()
number9 = Integer(ord('9')).to_byte_array()
jmp_place_holder = (Opcode.JMP, b'\x01')
initializing = [ # initialize auxiliary values
(Opcode.DUP, b''),
(Opcode.SIZE, b''),
(Opcode.DEC, b''), # index = len(string) - 1
(Opcode.PUSH1, b''), # isdigit = True
]
verify_empty_string = [ # verifies if string is empty
(Opcode.OVER, b''),
(Opcode.PUSHM1, b''),
jmp_place_holder, # jump to change_to_false if index == -1
]
skip_first_verify_while = [ # skips the first while verification, since string is not empty
jmp_place_holder
]
verify_while = [ # verifies if while is over
(Opcode.OVER, b''),
(Opcode.PUSH0, b''),
            jmp_place_holder, # jump to clean_and_return_bool if index < 0 (all characters checked)
]
jmp_verify_while = OpcodeHelper.get_jump_and_data(Opcode.JMP, get_bytes_count(verify_while), True)
skip_first_verify_while[-1] = jmp_verify_while
while_verify_lt_0 = [ # verifies if ord(string[index]) is < ord('0')
(Opcode.PUSH2, b''),
(Opcode.PICK, b''),
(Opcode.PUSH2, b''),
(Opcode.PICK, b''),
(Opcode.PUSH1, b''),
(Opcode.SUBSTR, b''),
(Opcode.CONVERT, StackItemType.ByteString),
(Opcode.DUP, b''),
(Opcode.PUSHDATA1, Integer(len(number0)).to_byte_array() + number0),
jmp_place_holder, # if ord(string[index]) < ord('0'), return False
]
while_verify_gt_9 = [ # verifies if ord(string[index]) is > ord('9')
(Opcode.PUSHDATA1, Integer(len(number9)).to_byte_array() + number9),
jmp_place_holder, # if ord(string[index]) > ord('9'), return False
]
        while_go_to_verify = [ # decreases index and goes back to check whether all characters have been visited
(Opcode.SWAP, b''),
(Opcode.DEC, b''), # index--
(Opcode.SWAP, b''),
# jump back to verify_while
]
jmp_back_to_verify = OpcodeHelper.get_jump_and_data(Opcode.JMP, -get_bytes_count(verify_while +
while_verify_lt_0 +
while_verify_gt_9 +
while_go_to_verify))
while_go_to_verify.append(jmp_back_to_verify)
jmp_out_of_while = OpcodeHelper.get_jump_and_data(Opcode.JMPLT, get_bytes_count(while_verify_gt_9 +
while_go_to_verify), True)
while_verify_lt_0[-1] = jmp_out_of_while
drop_char = [ # remove extra char from stack
(Opcode.DROP, b'')
]
jmp_to_change_to_false = OpcodeHelper.get_jump_and_data(Opcode.JMPGT, get_bytes_count(while_go_to_verify +
drop_char), True)
while_verify_gt_9[-1] = jmp_to_change_to_false
change_to_false = [ # remove True on top of stack and put False
(Opcode.DROP, b''),
(Opcode.PUSH0, b''),
]
jmp_to_return = OpcodeHelper.get_jump_and_data(Opcode.JMPEQ, get_bytes_count(skip_first_verify_while +
verify_while +
while_verify_lt_0 +
while_verify_gt_9 +
while_go_to_verify +
drop_char), True)
verify_empty_string[-1] = jmp_to_return
jmp_to_return = OpcodeHelper.get_jump_and_data(Opcode.JMPLT, get_bytes_count(while_verify_lt_0 +
while_verify_gt_9 +
while_go_to_verify +
drop_char +
change_to_false), True)
verify_while[-1] = jmp_to_return
clean_and_return_bool = [ # remove extra values from stack
(Opcode.NIP, b''),
(Opcode.NIP, b'')
]
return (
initializing +
verify_empty_string +
skip_first_verify_while +
verify_while +
while_verify_lt_0 +
while_verify_gt_9 +
while_go_to_verify +
drop_char +
change_to_false +
clean_and_return_bool
)
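    # A hedged Python rendering of the opcode sequence above, for reading
    # convenience only (the VM opcodes are authoritative):
    #
    #     index = len(string) - 1
    #     if index == -1:          # empty string -> False
    #         return False
    #     while index >= 0:
    #         if not ('0' <= string[index] <= '9'):
    #             return False     # non-digit character found
    #         index -= 1
    #     return True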
def push_self_first(self) -> bool:
return self.has_self_argument
@property
def _args_on_stack(self) -> int:
return len(self.args)
@property
def _body(self) -> Optional[str]:
return None
def METHOD_NAME(self, value: Any) -> IBuiltinMethod:
if isinstance(value, IByteStringType):
return IsDigitMethod(value)
return super().METHOD_NAME(value) | null |
279 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdrds.endpoint import endpoint_data
class CreateDrdsDBRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Drds', '2019-01-23', 'CreateDrdsDB','drds')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Encode(self):
return self.get_query_params().get('Encode')
def set_Encode(self,Encode):
self.add_query_param('Encode',Encode)
def get_RdsInstances(self):
return self.get_query_params().get('RdsInstance')
def set_RdsInstances(self, RdsInstances):
for depth1 in range(len(RdsInstances)):
if RdsInstances[depth1] is not None:
self.add_query_param('RdsInstance.' + str(depth1 + 1) , RdsInstances[depth1])
def get_Type(self):
return self.get_query_params().get('Type')
def set_Type(self,Type):
self.add_query_param('Type',Type)
def get_Password(self):
return self.get_query_params().get('Password')
def METHOD_NAME(self,Password):
self.add_query_param('Password',Password)
def get_RdsSuperAccounts(self):
return self.get_query_params().get('RdsSuperAccount')
def set_RdsSuperAccounts(self, RdsSuperAccounts):
for depth1 in range(len(RdsSuperAccounts)):
if RdsSuperAccounts[depth1].get('Password') is not None:
self.add_query_param('RdsSuperAccount.' + str(depth1 + 1) + '.Password', RdsSuperAccounts[depth1].get('Password'))
if RdsSuperAccounts[depth1].get('AccountName') is not None:
self.add_query_param('RdsSuperAccount.' + str(depth1 + 1) + '.AccountName', RdsSuperAccounts[depth1].get('AccountName'))
if RdsSuperAccounts[depth1].get('DbInstanceId') is not None:
self.add_query_param('RdsSuperAccount.' + str(depth1 + 1) + '.DbInstanceId', RdsSuperAccounts[depth1].get('DbInstanceId'))
def get_AccountName(self):
return self.get_query_params().get('AccountName')
def set_AccountName(self,AccountName):
self.add_query_param('AccountName',AccountName)
def get_DrdsInstanceId(self):
return self.get_query_params().get('DrdsInstanceId')
def set_DrdsInstanceId(self,DrdsInstanceId):
self.add_query_param('DrdsInstanceId',DrdsInstanceId)
def get_DbInstanceIsCreating(self):
return self.get_query_params().get('DbInstanceIsCreating')
def set_DbInstanceIsCreating(self,DbInstanceIsCreating):
self.add_query_param('DbInstanceIsCreating',DbInstanceIsCreating)
def get_InstDbNames(self):
return self.get_query_params().get('InstDbName')
def set_InstDbNames(self, InstDbNames):
for depth1 in range(len(InstDbNames)):
if InstDbNames[depth1].get('ShardDbName') is not None:
for depth2 in range(len(InstDbNames[depth1].get('ShardDbName'))):
if InstDbNames[depth1].get('ShardDbName')[depth2] is not None:
self.add_query_param('InstDbName.' + str(depth1 + 1) + '.ShardDbName.' + str(depth2 + 1) , InstDbNames[depth1].get('ShardDbName')[depth2])
if InstDbNames[depth1].get('DbInstanceId') is not None:
self.add_query_param('InstDbName.' + str(depth1 + 1) + '.DbInstanceId', InstDbNames[depth1].get('DbInstanceId'))
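    # A hedged illustration of the nested flattening above:
    # set_InstDbNames([{'DbInstanceId': 'rds-1', 'ShardDbName': ['db0', 'db1']}])
    # adds 'InstDbName.1.ShardDbName.1' = 'db0', 'InstDbName.1.ShardDbName.2' =
    # 'db1' and 'InstDbName.1.DbInstanceId' = 'rds-1' (ids invented).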
def get_DbName(self):
return self.get_query_params().get('DbName')
def set_DbName(self,DbName):
self.add_query_param('DbName',DbName)
def get_DbInstType(self):
return self.get_query_params().get('DbInstType')
def set_DbInstType(self,DbInstType):
        self.add_query_param('DbInstType',DbInstType) | null |
280 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class DescribeRouteEntryListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'DescribeRouteEntryList','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_DestCidrBlockLists(self): # RepeatList
return self.get_query_params().get('DestCidrBlockList')
def set_DestCidrBlockLists(self, DestCidrBlockList): # RepeatList
for depth1 in range(len(DestCidrBlockList)):
self.add_query_param('DestCidrBlockList.' + str(depth1 + 1), DestCidrBlockList[depth1])
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_RouteEntryName(self): # String
return self.get_query_params().get('RouteEntryName')
def set_RouteEntryName(self, RouteEntryName): # String
self.add_query_param('RouteEntryName', RouteEntryName)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_RouteEntryType(self): # String
return self.get_query_params().get('RouteEntryType')
def set_RouteEntryType(self, RouteEntryType): # String
self.add_query_param('RouteEntryType', RouteEntryType)
def get_IpVersion(self): # String
return self.get_query_params().get('IpVersion')
def set_IpVersion(self, IpVersion): # String
self.add_query_param('IpVersion', IpVersion)
def get_NextHopId(self): # String
return self.get_query_params().get('NextHopId')
def set_NextHopId(self, NextHopId): # String
self.add_query_param('NextHopId', NextHopId)
def get_NextHopType(self): # String
return self.get_query_params().get('NextHopType')
def set_NextHopType(self, NextHopType): # String
self.add_query_param('NextHopType', NextHopType)
def get_RouteTableId(self): # String
return self.get_query_params().get('RouteTableId')
def set_RouteTableId(self, RouteTableId): # String
self.add_query_param('RouteTableId', RouteTableId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_DestinationCidrBlock(self): # String
return self.get_query_params().get('DestinationCidrBlock')
def set_DestinationCidrBlock(self, DestinationCidrBlock): # String
self.add_query_param('DestinationCidrBlock', DestinationCidrBlock)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def METHOD_NAME(self): # Integer
return self.get_query_params().get('MaxResult')
def set_MaxResult(self, MaxResult): # Integer
self.add_query_param('MaxResult', MaxResult)
def get_ServiceType(self): # String
return self.get_query_params().get('ServiceType')
def set_ServiceType(self, ServiceType): # String
self.add_query_param('ServiceType', ServiceType)
def get_RouteEntryId(self): # String
return self.get_query_params().get('RouteEntryId')
def set_RouteEntryId(self, RouteEntryId): # String
self.add_query_param('RouteEntryId', RouteEntryId) | null |
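# --- Editor's note: hedged usage sketch, not part of the generated SDK file ---
# Shows how a request like the one above is typically dispatched with
# aliyunsdkcore's AcsClient; the credentials, region id, and route table id
# below are placeholders, not values from this file.
#
#   from aliyunsdkcore.client import AcsClient
#
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = DescribeRouteEntryListRequest()
#   request.set_RouteTableId('<route-table-id>')
#   response = client.do_action_with_exception(request)  # raw JSON bytes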
281 | #!/bin/python
# -*- coding: utf-8 -*-
"""
Unit tests for the welcome application's appadmin controller
"""
import os
import sys
import unittest
from gluon.compileapp import run_controller_in, run_view_in, compile_application, remove_compiled_application
from gluon.languages import TranslatorFactory
from gluon.storage import Storage, List
from gluon import fileutils
from gluon.dal import DAL, Field, Table
from gluon.http import HTTP
from gluon.fileutils import open_file
from gluon.cache import CacheInRam
DEFAULT_URI = os.getenv('DB', 'sqlite:memory')
def fake_check_credentials(foo):
return True
class TestAppAdmin(unittest.TestCase):
def METHOD_NAME(self):
from gluon.globals import Request, Response, Session, current
from gluon.html import A, DIV, FORM, MENU, TABLE, TR, INPUT, URL, XML
from gluon.html import ASSIGNJS
from gluon.validators import IS_NOT_EMPTY
from gluon.compileapp import LOAD
from gluon.http import HTTP, redirect
from gluon.tools import Auth
from gluon.sql import SQLDB
from gluon.sqlhtml import SQLTABLE, SQLFORM
self.original_check_credentials = fileutils.check_credentials
fileutils.check_credentials = fake_check_credentials
request = Request(env={})
request.application = 'welcome'
request.controller = 'appadmin'
request.function = self._testMethodName.split('_')[1]
request.folder = 'applications/welcome'
request.env.http_host = '127.0.0.1:8000'
request.env.remote_addr = '127.0.0.1'
response = Response()
session = Session()
T = TranslatorFactory('', 'en')
session.connect(request, response)
current.request = request
current.response = response
current.session = session
current.T = T
db = DAL(DEFAULT_URI, check_reserved=['all'])
auth = Auth(db)
auth.define_tables(username=True, signature=False)
db.define_table('t0', Field('tt'), auth.signature)
# Create a user
db.auth_user.insert(first_name='Bart',
last_name='Simpson',
username='user1',
email='[email protected]',
password='password_123',
registration_key=None,
registration_id=None)
self.env = locals()
def tearDown(self):
fileutils.check_credentials = self.original_check_credentials
def run_function(self):
return run_controller_in(self.env['request'].controller, self.env['request'].function, self.env)
def run_view(self):
return run_view_in(self.env)
def run_view_file_stream(self):
view_path = os.path.join(self.env['request'].folder, 'views', 'appadmin.html')
self.env['response'].view = open_file(view_path, 'r')
return run_view_in(self.env)
def _test_index(self):
result = self.run_function()
self.assertTrue('db' in result['databases'])
self.env.update(result)
try:
self.run_view()
self.run_view_file_stream()
except Exception as e:
import traceback
print(traceback.format_exc())
self.fail('Could not make the view')
def test_index(self):
self._test_index()
def test_index_compiled(self):
appname_path = os.path.join(os.getcwd(), 'applications', 'welcome')
compile_application(appname_path)
self._test_index()
remove_compiled_application(appname_path)
def test_index_minify(self):
# test for gluon/contrib/minify
self.env['response'].optimize_css = 'concat|minify'
self.env['response'].optimize_js = 'concat|minify'
self.env['current'].cache = Storage({'ram':CacheInRam()})
appname_path = os.path.join(os.getcwd(), 'applications', 'welcome')
self._test_index()
file_l = os.listdir(os.path.join(appname_path, 'static', 'temp'))
file_l.sort()
self.assertTrue(len(file_l) == 2)
self.assertEqual(file_l[0][0:10], 'compressed')
self.assertEqual(file_l[1][0:10], 'compressed')
self.assertEqual(file_l[0][-3:], 'css')
self.assertEqual(file_l[1][-2:], 'js')
def test_select(self):
request = self.env['request']
request.args = List(['db'])
request.env.query_string = 'query=db.auth_user.id>0'
result = self.run_function()
self.assertTrue('table' in result and 'query' in result)
self.assertTrue(result['table'] == 'auth_user')
self.assertTrue(result['query'] == 'db.auth_user.id>0')
self.env.update(result)
try:
self.run_view()
except Exception as e:
import traceback
print(traceback.format_exc())
self.fail('Could not make the view')
def test_insert(self):
request = self.env['request']
request.args = List(['db', 'auth_user'])
result = self.run_function()
self.assertTrue('table' in result)
self.assertTrue('form' in result)
self.assertTrue(str(result['table']) == 'auth_user')
self.env.update(result)
try:
self.run_view()
except Exception as e:
import traceback
print(traceback.format_exc())
self.fail('Could not make the view')
def test_insert_submit(self):
request = self.env['request']
request.args = List(['db', 'auth_user'])
form = self.run_function()['form']
hidden_fields = form.hidden_fields()
data = {}
data['_formkey'] = hidden_fields.element('input', _name='_formkey')['_value']
data['_formname'] = hidden_fields.element('input', _name='_formname')['_value']
data['first_name'] = 'Lisa'
data['last_name'] = 'Simpson'
data['username'] = 'lisasimpson'
data['password'] = 'password_123'
data['email'] = '[email protected]'
request._vars = data
result = self.run_function()
self.env.update(result)
try:
self.run_view()
except Exception as e:
import traceback
print(traceback.format_exc())
self.fail('Could not make the view')
db = self.env['db']
lisa_record = db(db.auth_user.username == 'lisasimpson').select().first()
self.assertIsNotNone(lisa_record)
del data['_formkey']
del data['_formname']
del data['password']
for key in data:
self.assertEqual(data[key], lisa_record[key])
def test_update_submit(self):
request = self.env['request']
request.args = List(['db', 'auth_user', '1'])
form = self.run_function()['form']
hidden_fields = form.hidden_fields()
data = {}
data['_formkey'] = hidden_fields.element('input', _name='_formkey')['_value']
data['_formname'] = hidden_fields.element('input', _name='_formname')['_value']
for element in form.elements('input'):
data[element['_name']] = element['_value']
data['email'] = '[email protected]'
data['id'] = '1'
request._vars = data
self.assertRaises(HTTP, self.run_function) | null |
282 | from itertools import chain
from math import isnan
from numbers import Real, Integral
import numpy as np
from Orange.data import Value, Unknown, DiscreteVariable
__all__ = ["Instance"]
class Instance:
def __init__(self, METHOD_NAME, data=None, id=None):
"""
Construct a new data instance.
:param domain: domain that describes the instance's variables
:type domain: Orange.data.Domain
:param data: instance's values
:type data: Orange.data.Instance or a sequence of values
:param id: instance id
:type id: hashable value
"""
if data is None and isinstance(METHOD_NAME, Instance):
data = METHOD_NAME
METHOD_NAME = data.METHOD_NAME
self._domain = METHOD_NAME
if data is None:
self._x = np.repeat(Unknown, len(METHOD_NAME.attributes))
self._y = np.repeat(Unknown, len(METHOD_NAME.class_vars))
self._metas = np.array([var.Unknown for var in METHOD_NAME.metas],
dtype=object)
self._weight = 1
elif isinstance(data, Instance) and data.METHOD_NAME == METHOD_NAME:
self._x = np.array(data._x)
self._y = np.atleast_1d(np.array(data._y))
self._metas = np.array(data._metas)
self._weight = data._weight
else:
self._x, self._y, self._metas = METHOD_NAME.convert(data)
self._y = np.atleast_1d(self._y)
self._weight = 1
if id is not None:
self.id = id
else:
from Orange.data import Table
self.id = Table.new_id()
@property
def METHOD_NAME(self):
"""The domain describing the instance's values."""
return self._domain
@property
def x(self):
"""
Instance's attributes as a 1-dimensional numpy array whose length
equals `len(self.domain.attributes)`.
"""
return self._x
@property
def y(self):
"""
Instance's classes as a 1-dimensional numpy array whose length
        equals `len(self.domain.class_vars)`.
"""
return self._y
@property
def metas(self):
"""
Instance's meta attributes as a 1-dimensional numpy array whose length
        equals `len(self.domain.metas)`.
"""
return self._metas
@property
def list(self):
"""
All instance's values, including attributes, classes and meta
attributes, as a list whose length equals `len(self.domain.attributes)
+ len(self.domain.class_vars) + len(self.domain.metas)`.
"""
n_self, n_metas = len(self), len(self._metas)
return [self[i].value if i < n_self else self[n_self - i - 1].value
for i in range(n_self + n_metas)]
@property
def weight(self):
"""The weight of the data instance. Default is 1."""
return self._weight
@weight.setter
def weight(self, weight):
self._weight = weight
def __setitem__(self, key, value):
if not isinstance(key, Integral):
key = self._domain.index(key)
value = self._domain[key].to_val(value)
if key >= 0 and not isinstance(value, (int, float)):
raise TypeError("Expected primitive value, got '%s'" %
type(value).__name__)
if 0 <= key < len(self._domain.attributes):
self._x[key] = value
elif len(self._domain.attributes) <= key:
self._y[key - len(self.METHOD_NAME.attributes)] = value
else:
self._metas[-1 - key] = value
def __getitem__(self, key):
idx = key if isinstance(key, Integral) else self._domain.index(key)
if 0 <= idx < len(self._domain.attributes):
value = self._x[idx]
elif idx >= len(self._domain.attributes):
if self._y.ndim == 0:
value = self._y
else:
value = self._y[idx - len(self.METHOD_NAME.attributes)]
else:
value = self._metas[-1 - idx]
var = self._domain[idx]
if isinstance(key, DiscreteVariable) and var is not key:
value = key.get_mapper_from(var)(value)
var = key
return Value(var, value)
#TODO Should we return an instance of `object` if we have a meta attribute
# that is not Discrete or Continuous? E.g. when we have strings, we'd
# like to be able to use startswith, lower etc...
# Or should we even return Continuous as floats and use Value only
# for discrete attributes?!
# Same in Table.__getitem__
@staticmethod
def str_values(data, variables, limit=True):
if limit:
s = ", ".join(var.str_val(val)
for var, val in zip(variables, data[:5]))
if len(data) > 5:
s += ", ..."
return s
else:
return ", ".join(var.str_val(val)
for var, val in zip(variables, data))
def _str(self, limit):
s = "[" + self.str_values(self._x, self._domain.attributes, limit)
if self._domain.class_vars:
s += " | " + \
self.str_values(self._y, self._domain.class_vars, limit)
s += "]"
if self._domain.metas:
s += " {" + \
self.str_values(self._metas, self._domain.metas, limit) + \
"}"
return s
def __str__(self):
return self._str(False)
def __repr__(self):
return self._str(True)
def __eq__(self, other):
if not isinstance(other, Instance):
other = Instance(self._domain, other)
def same(x1, x2):
nan1 = np.isnan(x1)
nan2 = np.isnan(x2)
return np.array_equal(nan1, nan2) and \
np.array_equal(x1[~nan1], x2[~nan2])
return same(self._x, other._x) and same(self._y, other._y) \
and all(m1 == m2 or
type(m1) == type(m2) == float and isnan(m1) and isnan(m2)
for m1, m2 in zip(self._metas, other._metas))
@classmethod
def __hash__(cls):
raise TypeError(f"unhashable type: '{type(cls.__name__)}'")
def __iter__(self):
return chain(iter(self._x), iter(self._y))
def values(self):
return (Value(var, val)
for var, val in zip(self.METHOD_NAME.variables, self))
def __len__(self):
return len(self._x) + len(self._y)
def attributes(self):
"""Return iterator over the instance's attributes"""
return iter(self._x)
def classes(self):
"""Return iterator over the instance's class attributes"""
return iter(self._y)
# A helper function for get_class and set_class
def _check_single_class(self):
if not self._domain.class_vars:
raise TypeError("Domain has no class variable")
elif len(self._domain.class_vars) > 1:
raise TypeError("Domain has multiple class variables")
def get_class(self):
"""
Return the class value as an instance of :obj:`Orange.data.Value`.
Throws an exception if there are multiple classes.
"""
self._check_single_class()
return Value(self._domain.class_var, self._y[0])
def get_classes(self):
"""
Return the class value as a list of instances of
:obj:`Orange.data.Value`.
"""
return (Value(var, value)
for var, value in zip(self._domain.class_vars, self._y))
def set_class(self, value):
"""
Set the instance's class. Throws an exception if there are multiple
classes.
"""
self._check_single_class()
if not isinstance(value, Real):
self._y[0] = self._domain.class_var.to_val(value)
else:
self._y[0] = value | null |
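# --- Editor's note: hedged usage sketch, not part of the original module ---
# Illustrates the Instance API above, assuming Orange 3's Domain and variable
# classes; the variable names and values are illustrative only.
#
#   from Orange.data import Domain, ContinuousVariable, DiscreteVariable
#
#   domain = Domain([ContinuousVariable("x1"), ContinuousVariable("x2")],
#                   DiscreteVariable("cls", values=("a", "b")))
#   inst = Instance(domain, [1.0, 2.0, "a"])
#   inst["x1"]          # Value(x1, 1.0), resolved through __getitem__
#   inst.get_class()    # Value(cls, 'a'); raises if there are multiple classes
#   inst["x2"] = 3.5    # __setitem__ converts through the variable's to_val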
283 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcloudauth.endpoint import endpoint_data
class CompareFaceVerifyRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cloudauth', '2019-03-07', 'CompareFaceVerify')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ProductCode(self): # String
return self.get_body_params().get('ProductCode')
def set_ProductCode(self, ProductCode): # String
self.add_body_params('ProductCode', ProductCode)
def get_TargetCertifyId(self): # String
return self.get_body_params().get('TargetCertifyId')
def set_TargetCertifyId(self, TargetCertifyId): # String
self.add_body_params('TargetCertifyId', TargetCertifyId)
def get_TargetFaceContrastPicture(self): # String
return self.get_body_params().get('TargetFaceContrastPicture')
def set_TargetFaceContrastPicture(self, TargetFaceContrastPicture): # String
self.add_body_params('TargetFaceContrastPicture', TargetFaceContrastPicture)
def get_TargetOssBucketName(self): # String
return self.get_body_params().get('TargetOssBucketName')
def set_TargetOssBucketName(self, TargetOssBucketName): # String
self.add_body_params('TargetOssBucketName', TargetOssBucketName)
def get_OuterOrderNo(self): # String
return self.get_body_params().get('OuterOrderNo')
def set_OuterOrderNo(self, OuterOrderNo): # String
self.add_body_params('OuterOrderNo', OuterOrderNo)
def get_SourceFaceContrastPicture(self): # String
return self.get_body_params().get('SourceFaceContrastPicture')
def set_SourceFaceContrastPicture(self, SourceFaceContrastPicture): # String
self.add_body_params('SourceFaceContrastPicture', SourceFaceContrastPicture)
def get_SourceCertifyId(self): # String
return self.get_body_params().get('SourceCertifyId')
def set_SourceCertifyId(self, SourceCertifyId): # String
self.add_body_params('SourceCertifyId', SourceCertifyId)
def get_TargetFaceContrastPictureUrl(self): # String
return self.get_body_params().get('TargetFaceContrastPictureUrl')
def set_TargetFaceContrastPictureUrl(self, TargetFaceContrastPictureUrl): # String
self.add_body_params('TargetFaceContrastPictureUrl', TargetFaceContrastPictureUrl)
def get_SourceOssObjectName(self): # String
return self.get_body_params().get('SourceOssObjectName')
def set_SourceOssObjectName(self, SourceOssObjectName): # String
self.add_body_params('SourceOssObjectName', SourceOssObjectName)
def get_SourceOssBucketName(self): # String
return self.get_body_params().get('SourceOssBucketName')
def METHOD_NAME(self, SourceOssBucketName): # String
self.add_body_params('SourceOssBucketName', SourceOssBucketName)
def get_TargetOssObjectName(self): # String
return self.get_body_params().get('TargetOssObjectName')
def set_TargetOssObjectName(self, TargetOssObjectName): # String
self.add_body_params('TargetOssObjectName', TargetOssObjectName)
def get_SceneId(self): # Long
return self.get_body_params().get('SceneId')
def set_SceneId(self, SceneId): # Long
self.add_body_params('SceneId', SceneId)
def get_SourceFaceContrastPictureUrl(self): # String
return self.get_body_params().get('SourceFaceContrastPictureUrl')
def set_SourceFaceContrastPictureUrl(self, SourceFaceContrastPictureUrl): # String
self.add_body_params('SourceFaceContrastPictureUrl', SourceFaceContrastPictureUrl)
def get_Crop(self): # String
return self.get_body_params().get('Crop')
def set_Crop(self, Crop): # String
self.add_body_params('Crop', Crop) | null |
284 | # coding=utf-8
# Copyright 2023 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Patch Transformerm similar to Gshard paper with BatchEnsemble MLPs."""
import dataclasses
from typing import Any, Callable, Mapping, Optional, Sequence, Tuple
import edward2.jax as ed
import flax.linen as nn
import jax.numpy as jnp
from uncertainty_baselines.models import vit
from uncertainty_baselines.models import vit_batchensemble
DType = type(jnp.float32)
InitializeFn = Callable[[jnp.ndarray, Sequence[int], DType], jnp.ndarray]
Params = Mapping[str, Any]
default_kwarg_dict = lambda: dataclasses.field(default_factory=dict)
class VisionTransformerBEGP(nn.Module):
"""BatchEnsemble Vision Transformer with Gaussian process last layer.
You must specify either the vertical and horizontal resolution of the patches
(patch_size), or the number of vertical and horizontal divisions of the input
image (patch_grid).
"""
num_classes: int
transformer: Params
hidden_size: int
patch_size: Optional[Tuple[int, int]] = None
patch_grid: Optional[Tuple[int, int]] = None
representation_size: Optional[int] = None
classifier: str = "token"
head_kernel_init: InitializeFn = nn.initializers.zeros
use_gp_layer: bool = True
gp_layer_kwargs: Mapping[str, Any] = default_kwarg_dict()
train: Optional[bool] = None
def setup(self):
# pylint:disable=not-a-mapping
if self.use_gp_layer:
self.gp_layer = ed.nn.RandomFeatureGaussianProcess(
features=self.num_classes, name="head", **self.gp_layer_kwargs)
# pylint:enable=not-a-mapping
def METHOD_NAME(self,
images: jnp.ndarray,
hidden_size: int,
patch_size: Optional[Tuple[int, int]] = None,
patch_grid: Optional[Tuple[int, int]] = None) -> jnp.ndarray:
n, h, w, _ = images.shape
        if (patch_size is None) == (patch_grid is None):
raise ValueError(
"You must specify either patch_size or patch_grid, and not both "
f"(patch_size = {patch_size}, patch_grid = {patch_grid})")
elif patch_size is None:
patch_size = (h // patch_grid[0], w // patch_grid[1])
x = nn.Conv(
hidden_size,
patch_size,
strides=patch_size,
padding="VALID",
name="embedding")(
images)
return jnp.reshape(x, [n, -1, hidden_size])
@nn.compact
def __call__(self, images: jnp.ndarray, train: Optional[bool] = None,
mean_field_factor: float = -1., **gp_kwargs):
train = nn.module.merge_param("train", self.train, train)
# Convert images to patches.
x = self.METHOD_NAME(images, self.hidden_size, self.patch_size, self.patch_grid)
# Add "class" token if necessary.
n, _, c = x.shape
if self.classifier == "token":
cls = self.param("cls", nn.initializers.zeros, (1, 1, self.hidden_size))
cls = jnp.tile(cls, [n, 1, 1])
x = jnp.concatenate([cls, x], axis=1)
# Encode tokens.
x, extra_info = vit_batchensemble.BatchEnsembleEncoder(
train=train, name="Transformer", **self.transformer)(
x)
# Reduce tokens to a single vector representation.
if self.classifier == "token":
# Take the first token's output as representation as in BERT.
x = x[:, 0]
elif self.classifier == "gap":
# Average all tokens.
x = jnp.mean(x, axis=tuple(range(1, x.ndim - 1))) # (1,) or (1, 2)
elif self.classifier == "map":
probe = self.param("probe", nn.initializers.xavier_uniform(), (1, 1, c))
# x may have been subject to tiling, n can be different from x.shape[0].
probe = jnp.tile(probe, [x.shape[0], 1, 1])
attention = nn.MultiHeadDotProductAttention(
deterministic=not train,
num_heads=self.transformer.get("attention", {}).get("num_heads", 1),
kernel_init=nn.initializers.xavier_uniform())
x = attention(inputs_q=probe, inputs_kv=x)
y = nn.LayerNorm()(x)
y = vit.MlpBlock(
mlp_dim=self.transformer["mlp_dim"], dropout_rate=0)(
y, deterministic=not train)
x = (x + y)[:, 0]
else:
raise ValueError(f"Unknown classifier: {self.classifier}")
if self.representation_size is None:
x = vit.IdentityLayer(name="pre_logits")(x)
extra_info["pre_logits"] = x
else:
x = nn.Dense(self.representation_size, name="pre_logits")(x)
extra_info["pre_logits"] = x
x = nn.tanh(x)
if self.use_gp_layer:
x_gp = self.gp_layer(x, **gp_kwargs)
# Gaussian process layer output: a tuple of logits, covmat, and optionally
# random features.
extra_info["covmat"] = x_gp[1]
if len(x_gp) > 2:
extra_info["random_features"] = x_gp[2]
if train:
x = x_gp[0]
else:
# During inference, compute posterior mean by adjusting the original
# logits with predictive uncertainty.
x = ed.nn.utils.mean_field_logits(
logits=x_gp[0], covmat=x_gp[1], mean_field_factor=mean_field_factor)
else:
x = nn.Dense(
self.num_classes, kernel_init=self.head_kernel_init,
name="batchensemble_head")(
x)
return x, extra_info
def vision_transformer_be_gp(
num_classes: int,
hidden_size: int,
transformer: Params,
patch_size: Optional[Tuple[int, int]] = None,
patch_grid: Optional[Tuple[int, int]] = None,
representation_size: Optional[int] = None,
classifier: str = "token",
head_kernel_init: InitializeFn = nn.initializers.zeros,
use_gp_layer: bool = True,
gp_layer_kwargs: Mapping[str, Any] = default_kwarg_dict(),
train: Optional[bool] = None):
"""Builds a BatchEnsemble GP Vision Transformer (ViT) model."""
# TODO(dusenberrymw): Add API docs once the config dict in VisionTransformerBE
# is cleaned up.
return VisionTransformerBEGP(
num_classes=num_classes,
transformer=transformer,
hidden_size=hidden_size,
patch_size=patch_size,
patch_grid=patch_grid,
representation_size=representation_size,
classifier=classifier,
head_kernel_init=head_kernel_init,
use_gp_layer=use_gp_layer,
gp_layer_kwargs=gp_layer_kwargs,
train=train) | null |
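# --- Editor's note: hedged usage sketch, not part of the original file ---
# Only "mlp_dim" and the nested attention "num_heads" keys are read directly in
# this file; the full transformer config is consumed by
# vit_batchensemble.BatchEnsembleEncoder, so the dict below is an assumption.
#
#   model = vision_transformer_be_gp(
#       num_classes=10,
#       hidden_size=192,
#       transformer={"mlp_dim": 768, "attention": {"num_heads": 3}},
#       patch_size=(16, 16),   # exactly one of patch_size / patch_grid
#       classifier="token",
#       use_gp_layer=True,
#       gp_layer_kwargs={},
#   )
#   # logits, extra_info = model.apply(variables, images, train=False)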
285 | # coding: utf-8
"""
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
"""
import unittest
from unittest.mock import patch
import urllib3
import typing_extensions
import unit_test_api
from unit_test_api.paths.request_body_post_oneof_complex_types_request_body.post import operation as post # noqa: E501
from unit_test_api import schemas, api_client
from unit_test_api.configurations import api_configuration, schema_configuration
from .. import ApiTestMixin
class TestPost(ApiTestMixin, unittest.TestCase):
"""
Post unit test stubs
"""
api_config = api_configuration.ApiConfiguration()
schema_config = schema_configuration.SchemaConfiguration()
used_api_client = api_client.ApiClient(configuration=api_config, schema_configuration=schema_config)
api = post.ApiForPost(api_client=used_api_client) # noqa: E501
response_status = 200
response_body = ''
def test_first_oneof_valid_complex_passes(self):
content_type = 'application/json'
# first oneOf valid (complex)
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
{
"bar":
2,
}
)
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
mock_request.return_value = self.response(
self.json_bytes(self.response_body),
status=self.response_status
)
api_response = self.api.post(
body=body,
)
self.assert_pool_manager_request_called_with(
mock_request,
self.api_config.get_server_url('servers', None) + "/requestBody/postOneofComplexTypesRequestBody",
method='post'.upper(),
body=self.json_bytes(payload),
content_type=content_type,
)
assert isinstance(api_response.response, urllib3.HTTPResponse)
assert isinstance(api_response.body, schemas.Unset)
def METHOD_NAME(self):
content_type = 'application/json'
# neither oneOf valid (complex)
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
{
"foo":
2,
"bar":
"quux",
}
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
def test_both_oneof_valid_complex_fails(self):
content_type = 'application/json'
# both oneOf valid (complex)
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
{
"foo":
"baz",
"bar":
2,
}
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
def test_second_oneof_valid_complex_passes(self):
content_type = 'application/json'
# second oneOf valid (complex)
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
{
"foo":
"baz",
}
)
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
mock_request.return_value = self.response(
self.json_bytes(self.response_body),
status=self.response_status
)
api_response = self.api.post(
body=body,
)
self.assert_pool_manager_request_called_with(
mock_request,
self.api_config.get_server_url('servers', None) + "/requestBody/postOneofComplexTypesRequestBody",
method='post'.upper(),
body=self.json_bytes(payload),
content_type=content_type,
)
assert isinstance(api_response.response, urllib3.HTTPResponse)
assert isinstance(api_response.body, schemas.Unset)
if __name__ == '__main__':
unittest.main() | null |
286 | # This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
import unittest as ut
from h5py import h5p, h5f, version
from .common import TestCase
class TestLibver(TestCase):
"""
Feature: Setting/getting lib ver bounds
"""
def test_libver(self):
""" Test libver bounds set/get """
plist = h5p.create(h5p.FILE_ACCESS)
plist.set_libver_bounds(h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST)
self.assertEqual((h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST),
plist.get_libver_bounds())
@ut.skipIf(version.hdf5_version_tuple < (1, 10, 2),
'Requires HDF5 1.10.2 or later')
def test_libver_v18(self):
""" Test libver bounds set/get for H5F_LIBVER_V18"""
plist = h5p.create(h5p.FILE_ACCESS)
plist.set_libver_bounds(h5f.LIBVER_EARLIEST, h5f.LIBVER_V18)
self.assertEqual((h5f.LIBVER_EARLIEST, h5f.LIBVER_V18),
plist.get_libver_bounds())
@ut.skipIf(version.hdf5_version_tuple < (1, 10, 2),
'Requires HDF5 1.10.2 or later')
def test_libver_v110(self):
""" Test libver bounds set/get for H5F_LIBVER_V110"""
plist = h5p.create(h5p.FILE_ACCESS)
plist.set_libver_bounds(h5f.LIBVER_V18, h5f.LIBVER_V110)
self.assertEqual((h5f.LIBVER_V18, h5f.LIBVER_V110),
plist.get_libver_bounds())
@ut.skipIf(version.hdf5_version_tuple < (1, 11, 4),
'Requires HDF5 1.11.4 or later')
def test_libver_v112(self):
""" Test libver bounds set/get for H5F_LIBVER_V112"""
plist = h5p.create(h5p.FILE_ACCESS)
plist.set_libver_bounds(h5f.LIBVER_V18, h5f.LIBVER_V112)
self.assertEqual((h5f.LIBVER_V18, h5f.LIBVER_V112),
plist.get_libver_bounds())
class TestDA(TestCase):
'''
Feature: setting/getting chunk cache size on a dataset access property list
'''
def test_chunk_cache(self):
'''test get/set chunk cache '''
dalist = h5p.create(h5p.DATASET_ACCESS)
nslots = 10000 # 40kb hash table
nbytes = 1000000 # 1MB cache size
w0 = .5 # even blend of eviction strategy
dalist.set_chunk_cache(nslots, nbytes, w0)
self.assertEqual((nslots, nbytes, w0),
dalist.get_chunk_cache())
@ut.skipIf(version.hdf5_version_tuple < (1, 8, 17),
'Requires HDF5 1.8.17 or later')
def test_efile_prefix(self):
'''test get/set efile prefix '''
dalist = h5p.create(h5p.DATASET_ACCESS)
self.assertEqual(dalist.get_efile_prefix().decode(), '')
efile_prefix = "path/to/external/dataset"
dalist.set_efile_prefix(efile_prefix.encode('utf-8'))
self.assertEqual(dalist.get_efile_prefix().decode(),
efile_prefix)
efile_prefix = "${ORIGIN}"
dalist.set_efile_prefix(efile_prefix.encode('utf-8'))
self.assertEqual(dalist.get_efile_prefix().decode(),
efile_prefix)
@ut.skipIf(version.hdf5_version_tuple < (1, 10, 2),
'Requires HDF5 1.10.2 or later')
def test_virtual_prefix(self):
'''test get/set virtual prefix '''
dalist = h5p.create(h5p.DATASET_ACCESS)
self.assertEqual(dalist.get_virtual_prefix().decode(), '')
virtual_prefix = "path/to/virtual/dataset"
dalist.set_virtual_prefix(virtual_prefix.encode('utf-8'))
self.assertEqual(dalist.get_virtual_prefix().decode(),
virtual_prefix)
class TestFA(TestCase):
'''
Feature: setting/getting mdc config on a file access property list
'''
def test_mdc_config(self):
'''test get/set mdc config '''
falist = h5p.create(h5p.FILE_ACCESS)
config = falist.get_mdc_config()
falist.set_mdc_config(config)
def test_set_alignment(self):
        '''test get/set alignment '''
falist = h5p.create(h5p.FILE_ACCESS)
threshold = 10 * 1024 # threshold of 10kiB
        alignment = 1024 * 1024 # alignment of 1 MiB
falist.set_alignment(threshold, alignment)
self.assertEqual((threshold, alignment),
falist.get_alignment())
@ut.skipUnless(
version.hdf5_version_tuple >= (1, 12, 1) or
(version.hdf5_version_tuple[:2] == (1, 10) and version.hdf5_version_tuple[2] >= 7),
'Requires HDF5 1.12.1 or later or 1.10.x >= 1.10.7')
def METHOD_NAME(self):
'''test get/set file locking'''
falist = h5p.create(h5p.FILE_ACCESS)
use_file_locking = False
ignore_when_disabled = False
falist.set_file_locking(use_file_locking, ignore_when_disabled)
self.assertEqual((use_file_locking, ignore_when_disabled),
falist.get_file_locking())
class TestPL(TestCase):
def test_obj_track_times(self):
"""
tests if the object track times set/get
"""
# test for groups
gcid = h5p.create(h5p.GROUP_CREATE)
gcid.set_obj_track_times(False)
self.assertEqual(False, gcid.get_obj_track_times())
gcid.set_obj_track_times(True)
self.assertEqual(True, gcid.get_obj_track_times())
# test for datasets
dcid = h5p.create(h5p.DATASET_CREATE)
dcid.set_obj_track_times(False)
self.assertEqual(False, dcid.get_obj_track_times())
dcid.set_obj_track_times(True)
self.assertEqual(True, dcid.get_obj_track_times())
# test for generic objects
ocid = h5p.create(h5p.OBJECT_CREATE)
ocid.set_obj_track_times(False)
self.assertEqual(False, ocid.get_obj_track_times())
ocid.set_obj_track_times(True)
self.assertEqual(True, ocid.get_obj_track_times())
def test_link_creation_tracking(self):
"""
tests the link creation order set/get
"""
gcid = h5p.create(h5p.GROUP_CREATE)
gcid.set_link_creation_order(0)
self.assertEqual(0, gcid.get_link_creation_order())
flags = h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED
gcid.set_link_creation_order(flags)
self.assertEqual(flags, gcid.get_link_creation_order())
# test for file creation
fcpl = h5p.create(h5p.FILE_CREATE)
fcpl.set_link_creation_order(flags)
self.assertEqual(flags, fcpl.get_link_creation_order())
def test_attr_phase_change(self):
"""
test the attribute phase change
"""
cid = h5p.create(h5p.OBJECT_CREATE)
# test default value
ret = cid.get_attr_phase_change()
self.assertEqual((8,6), ret)
# max_compact must < 65536 (64kb)
with self.assertRaises(ValueError):
cid.set_attr_phase_change(65536, 6)
# Using dense attributes storage to avoid 64kb size limitation
# for a single attribute in compact attribute storage.
cid.set_attr_phase_change(0, 0)
self.assertEqual((0,0), cid.get_attr_phase_change()) | null |
287 | from boa3_test.tests.boa_test import BoaTest # needs to be the first import to avoid circular imports
from boa3.internal.exception import CompilerError
from boa3.internal.neo.vm.type.String import String
from boa3.internal.neo3.vm import VMState
from boa3_test.test_drive.testrunner.neo_test_runner import NeoTestRunner
class TestReversed(BoaTest):
default_folder: str = 'test_sc/reversed_test'
def test_reversed_list_bool(self):
path, _ = self.get_deploy_file_paths('ReversedListBool.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
list_bool = [True, True, False]
invokes.append(runner.call_contract(path, 'main'))
reversed_list = list(reversed(list_bool))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_reversed_list_bytes(self):
path, _ = self.get_deploy_file_paths('ReversedListBytes.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
list_bytes = [b'1', b'2', b'3']
reversed_list = [String.from_bytes(element) for element in reversed(list_bytes)]
invokes.append(runner.call_contract(path, 'main'))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_reversed_list_int(self):
path, _ = self.get_deploy_file_paths('ReversedListInt.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
list_int = [1, 2, 3]
invokes.append(runner.call_contract(path, 'main'))
reversed_list = list(reversed(list_int))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_reversed_list_str(self):
path, _ = self.get_deploy_file_paths('ReversedListStr.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
list_str = ['neo3-boa', 'unit', 'test']
invokes.append(runner.call_contract(path, 'main'))
reversed_list = list(reversed(list_str))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_reversed_list(self):
path, _ = self.get_deploy_file_paths('ReversedList.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
list_any = [1, 'string', False]
invokes.append(runner.call_contract(path, 'main', list_any))
reversed_list = list(reversed(list_any))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_reversed_string(self):
path, _ = self.get_deploy_file_paths('ReversedString.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
string = 'unit_test'
invokes.append(runner.call_contract(path, 'main', string))
reversed_list = list(reversed(string))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_reversed_bytes(self):
path, _ = self.get_deploy_file_paths('ReversedBytes.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
bytes_value = b'unit_test'
reversed_list = list(reversed(bytes_value))
invokes.append(runner.call_contract(path, 'main', bytes_value))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def METHOD_NAME(self):
path, _ = self.get_deploy_file_paths('ReversedRange.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
reversed_list = list(reversed(range(3)))
invokes.append(runner.call_contract(path, 'main'))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_reversed_tuple(self):
path, _ = self.get_deploy_file_paths('ReversedTuple.py')
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
tuple_value = (1, 2, 3)
reversed_list = list(reversed(tuple_value))
invokes.append(runner.call_contract(path, 'main', tuple_value))
expected_results.append(reversed_list)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_mismatched_type(self):
path = self.get_contract_path('ReversedParameterMismatchedType')
self.assertCompilerLogs(CompilerError.MismatchedTypes, path) | null |
288 | """Implementation of the CNN Decoder part of
"Convolutional Sequence to Sequence Learning"
"""
import torch
import torch.nn as nn
from onmt.modules import ConvMultiStepAttention, GlobalAttention
from onmt.utils.cnn_factory import shape_transform, GatedConv
from onmt.decoders.decoder import DecoderBase
SCALE_WEIGHT = 0.5**0.5
class CNNDecoder(DecoderBase):
"""Decoder based on "Convolutional Sequence to Sequence Learning"
:cite:`DBLP:journals/corr/GehringAGYD17`.
Consists of residual convolutional layers, with ConvMultiStepAttention.
"""
def __init__(
self,
num_layers,
hidden_size,
attn_type,
copy_attn,
cnn_kernel_width,
dropout,
embeddings,
copy_attn_type,
):
super(CNNDecoder, self).__init__()
self.cnn_kernel_width = cnn_kernel_width
self.embeddings = embeddings
# Decoder State
self.state = {}
input_size = self.embeddings.embedding_size
self.linear = nn.Linear(input_size, hidden_size)
self.conv_layers = nn.ModuleList(
[
GatedConv(hidden_size, cnn_kernel_width, dropout, True)
for i in range(num_layers)
]
)
self.attn_layers = nn.ModuleList(
[ConvMultiStepAttention(hidden_size) for i in range(num_layers)]
)
# CNNDecoder has its own attention mechanism.
# Set up a separate copy attention layer if needed.
assert not copy_attn, "Copy mechanism not yet tested in conv2conv"
if copy_attn:
self.copy_attn = GlobalAttention(hidden_size, attn_type=copy_attn_type)
else:
self.copy_attn = None
@classmethod
def from_opt(cls, opt, embeddings):
"""Alternate constructor."""
return cls(
opt.dec_layers,
opt.dec_hid_size,
opt.global_attention,
opt.copy_attn,
opt.cnn_kernel_width,
opt.dropout[0] if type(opt.dropout) is list else opt.dropout,
embeddings,
opt.copy_attn_type,
)
def init_state(self, _, enc_out, enc_hidden):
"""Init decoder state."""
self.state["src"] = (enc_out + enc_hidden) * SCALE_WEIGHT
self.state["previous_input"] = None
def map_state(self, fn):
self.state["src"] = fn(self.state["src"], 0)
if self.state["previous_input"] is not None:
self.state["previous_input"] = fn(self.state["previous_input"], 0)
def detach_state(self):
self.state["previous_input"] = self.state["previous_input"].detach()
def METHOD_NAME(self, tgt, enc_out, step=None, **kwargs):
"""See :obj:`onmt.modules.RNNDecoderBase.forward()`"""
if self.state["previous_input"] is not None:
tgt = torch.cat([self.state["previous_input"], tgt], 1)
dec_outs = []
attns = {"std": []}
if self.copy_attn is not None:
attns["copy"] = []
emb = self.embeddings(tgt)
assert emb.dim() == 3 # batch x len x embedding_dim
tgt_emb = emb
# The output of CNNEncoder.
enc_out_t = enc_out
# The combination of output of CNNEncoder and source embeddings.
enc_out_c = self.state["src"]
emb_reshape = tgt_emb.view(tgt_emb.size(0) * tgt_emb.size(1), -1)
linear_out = self.linear(emb_reshape)
x = linear_out.view(tgt_emb.size(0), tgt_emb.size(1), -1)
x = shape_transform(x)
pad = torch.zeros(x.size(0), x.size(1), self.cnn_kernel_width - 1, 1)
pad = pad.type_as(x)
base_target_emb = x
for conv, attention in zip(self.conv_layers, self.attn_layers):
new_target_input = torch.cat([pad, x], 2)
out = conv(new_target_input)
c, attn = attention(base_target_emb, out, enc_out_t, enc_out_c)
x = (x + (c + out) * SCALE_WEIGHT) * SCALE_WEIGHT
dec_outs = x.squeeze(3).transpose(1, 2)
# Process the result and update the attentions.
if self.state["previous_input"] is not None:
dec_outs = dec_outs[:, self.state["previous_input"].size(1) :, :]
attn = attn[:, self.state["previous_input"].size(1) :].squeeze()
attn = torch.stack([attn])
attns["std"] = attn
if self.copy_attn is not None:
attns["copy"] = attn
# Update the state.
self.state["previous_input"] = tgt
# TODO change the way attns is returned dict => list or tuple (onnx)
return dec_outs, attns
def update_dropout(self, dropout, attention_dropout=None):
for layer in self.conv_layers:
layer.dropout.p = dropout | null |
289 | from boa3_test.tests.boa_test import BoaTest # needs to be the first import to avoid circular imports
from boa3.internal.exception import CompilerError
from boa3.internal.neo.vm.opcode.Opcode import Opcode
from boa3.internal.neo3.vm import VMState
from boa3_test.test_drive.testrunner.neo_test_runner import NeoTestRunner
class TestNone(BoaTest):
default_folder: str = 'test_sc/none_test'
def test_variable_none(self):
expected_output = (
Opcode.INITSLOT # function signature
+ b'\x01'
+ b'\x00'
+ Opcode.PUSHNULL
+ Opcode.STLOC0
+ Opcode.RET # return
)
path = self.get_contract_path('VariableNone.py')
output = self.compile(path)
self.assertEqual(expected_output, output)
def test_none_tuple(self):
expected_output = (
Opcode.INITSLOT # function signature
+ b'\x01'
+ b'\x00'
+ Opcode.PUSHNULL # a = (None, None, None)
+ Opcode.PUSHNULL
+ Opcode.PUSHNULL
+ Opcode.PUSH3
+ Opcode.PACK
+ Opcode.STLOC0
+ Opcode.RET # return
)
path = self.get_contract_path('NoneTuple.py')
output = self.compile(path)
self.assertEqual(expected_output, output)
def test_none_identity(self):
expected_output = (
Opcode.INITSLOT # function signature
+ b'\x00'
+ b'\x01'
+ Opcode.LDARG0
+ Opcode.ISNULL
+ Opcode.RET # return
)
path = self.get_contract_path('NoneIdentity.py')
output = self.compile(path)
self.assertEqual(expected_output, output)
path, _ = self.get_deploy_file_paths(path)
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
invokes.append(runner.call_contract(path, 'Main', None))
expected_results.append(True)
invokes.append(runner.call_contract(path, 'Main', 5))
expected_results.append(False)
invokes.append(runner.call_contract(path, 'Main', '5'))
expected_results.append(False)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_none_not_identity(self):
expected_output = (
Opcode.INITSLOT # function signature
+ b'\x00'
+ b'\x01'
+ Opcode.LDARG0
+ Opcode.ISNULL
+ Opcode.NOT
+ Opcode.RET # return
)
path = self.get_contract_path('NoneNotIdentity.py')
output = self.compile(path)
self.assertEqual(expected_output, output)
path, _ = self.get_deploy_file_paths(path)
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
invokes.append(runner.call_contract(path, 'Main', None))
expected_results.append(False)
invokes.append(runner.call_contract(path, 'Main', 5))
expected_results.append(True)
invokes.append(runner.call_contract(path, 'Main', '5'))
expected_results.append(True)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_none_equality(self):
path = self.get_contract_path('NoneEquality.py')
self.assertCompilerLogs(CompilerError.MismatchedTypes, path)
def test_mismatched_type_int_operation(self):
path = self.get_contract_path('MismatchedTypesInOperation.py')
self.assertCompilerLogs(CompilerError.MismatchedTypes, path)
def test_reassign_variable_with_none(self):
expected_output = (
Opcode.INITSLOT # function signature
+ b'\x02'
+ b'\x00'
+ Opcode.PUSH2 # a = 2
+ Opcode.STLOC0
+ Opcode.PUSH4 # b = a * 2
+ Opcode.STLOC1
+ Opcode.PUSHNULL # a = None
+ Opcode.STLOC0
+ Opcode.RET # return
)
path = self.get_contract_path('ReassignVariableWithNone.py')
output = self.compile(path)
self.assertEqual(expected_output, output)
path, _ = self.get_deploy_file_paths(path)
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
invokes.append(runner.call_contract(path, 'Main'))
expected_results.append(None)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def test_reassign_variable_after_none(self):
expected_output = (
Opcode.INITSLOT # function signature
+ b'\x02'
+ b'\x00'
+ Opcode.PUSHNULL # a = None
+ Opcode.STLOC0
+ Opcode.PUSH2 # a = 2
+ Opcode.STLOC0
+ Opcode.PUSH4 # b = a * 2
+ Opcode.STLOC1
+ Opcode.RET # return
)
path = self.get_contract_path('ReassignVariableAfterNone.py')
output = self.compile(path)
self.assertEqual(expected_output, output)
path, _ = self.get_deploy_file_paths(path)
runner = NeoTestRunner(runner_id=self.method_name())
invokes = []
expected_results = []
invokes.append(runner.call_contract(path, 'Main'))
expected_results.append(None)
runner.execute()
self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)
for x in range(len(invokes)):
self.assertEqual(expected_results[x], invokes[x].result)
def METHOD_NAME(self):
path = self.get_contract_path('NoneBoa2Test.py')
self.assertCompilerLogs(CompilerError.MismatchedTypes, path) | null |
290 | """
Copyright (c) 2023, NVIDIA CORPORATION.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tensorflow as tf
import sparse_operation_kit as sok
from models import SOKEmbedding
import os, glob
class EarlyStopper:
def __init__(self):
self._stop = False
def set_stop(self, message):
self._stop = True
self._stop_reason = message
@property
def stop_reason(self):
return self._stop_reason
def should_stop(self):
return self._stop
class WarmUpAndPolyDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
"""Learning rate callable for the embeddings.
Linear warmup on [0, warmup_steps] then
Constant on [warmup_steps, decay_start_steps]
And polynomial decay on [decay_start_steps, decay_start_steps + decay_steps].
"""
def __init__(
self,
batch_size: int,
decay_exp: float = 2.0,
learning_rate: float = 40.0,
warmup_steps: int = 8000,
decay_steps: int = 12000,
decay_start_steps: int = 10000,
):
super(WarmUpAndPolyDecay, self).__init__()
self.batch_size = batch_size
self.decay_exp = decay_exp
self.learning_rate = learning_rate
self.warmup_steps = warmup_steps
self.decay_steps = decay_steps
self.decay_start_steps = decay_start_steps
def __call__(self, step):
decay_exp = self.decay_exp
learning_rate = self.learning_rate
warmup_steps = self.warmup_steps
decay_steps = self.decay_steps
decay_start_steps = self.decay_start_steps
scal = self.batch_size / 2048
adj_lr = learning_rate * scal
if warmup_steps == 0:
return adj_lr
warmup_lr = step / warmup_steps * adj_lr
global_step = tf.cast(step, tf.float32)
decay_steps = tf.cast(decay_steps, tf.float32)
decay_start_step = tf.cast(decay_start_steps, tf.float32)
warmup_lr = tf.cast(warmup_lr, tf.float32)
steps_since_decay_start = global_step - decay_start_step
already_decayed_steps = tf.minimum(steps_since_decay_start, decay_steps)
decay_lr = adj_lr * ((decay_steps - already_decayed_steps) / decay_steps) ** decay_exp
decay_lr = tf.maximum(0.0001, decay_lr)
lr = tf.where(
global_step < warmup_steps,
warmup_lr,
tf.where(
tf.logical_and(decay_steps > 0, global_step > decay_start_step), decay_lr, adj_lr
),
)
lr = tf.maximum(0.01, lr)
return lr
def get_config(self):
return {
"batch_size": self.batch_size,
"decay_exp": self.decay_exp,
"learning_rate": self.learning_rate,
"warmup_steps": self.warmup_steps,
"decay_steps": self.decay_steps,
"decay_start_steps": self.decay_start_steps,
}
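# --- Editor's note: hedged illustration, not part of the original file ---
# The schedule above warms up linearly to the batch-scaled rate
# adj_lr = learning_rate * batch_size / 2048 over [0, warmup_steps], holds
# adj_lr until decay_start_steps, then decays polynomially over decay_steps
# (with floors of 0.0001 on the decayed rate and 0.01 overall). Numbers below
# are illustrative.
#
#   lr = WarmUpAndPolyDecay(batch_size=4096, decay_exp=2.0, learning_rate=40.0,
#                           warmup_steps=8000, decay_steps=12000,
#                           decay_start_steps=10000)
#   lr(8000)   # adj_lr = 40.0 * 4096 / 2048 = 80.0
#   optimizer = tf.keras.optimizers.SGD(learning_rate=lr)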
def get_optimizer(optimizer=None):
if not optimizer:
return tf.keras.optimizers.Adam()
else:
return tf.keras.optimizers.get(optimizer)
def get_lr_callable(
global_batch_size, decay_exp, learning_rate, warmup_steps, decay_steps, decay_start_steps
):
return WarmUpAndPolyDecay(
batch_size=global_batch_size,
decay_exp=decay_exp,
learning_rate=learning_rate,
warmup_steps=warmup_steps,
decay_steps=decay_steps,
decay_start_steps=decay_start_steps,
)
class NullScope(object):
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
return False
class NullStrategy(object):
def scope(self):
return NullScope()
def run(self, func, *args, **kwargs):
return func(*args, **kwargs)
def gather(self, tensor, axis):
import horovod.tensorflow as hvd
return hvd.allgather(tensor)
def METHOD_NAME(file_pattern, num_pipelines, pipeline_id):
matching_files = glob.glob(file_pattern)
matching_files.sort()
nums_per_shard = len(matching_files) // num_pipelines
return matching_files[pipeline_id * nums_per_shard : (pipeline_id + 1) * nums_per_shard]
def get_distribute_dataset(dataset, strategy, distribute_dataset=True):
if isinstance(strategy, NullStrategy) or not distribute_dataset:
return dataset()
else:
return strategy.distribute_datasets_from_function(
lambda input_context: dataset(input_context), options=tf.distribute.InputOptions()
)
def split_embedding_variables_from_others(model):
if isinstance(model.embedding_layer, SOKEmbedding):
return sok.split_embedding_variable_from_others(model.trainable_variables)
else:
dense_vars = []
for layer in model.layers:
if layer != model.embedding_layer:
dense_vars.extend(layer.trainable_variables)
return model.embedding_layer.trainable_variables, dense_vars
def all_reduce(tensors, combiner="sum", comm_options=None):
if tf.distribute.has_strategy():
replica_ctx = tf.distribute.get_replica_context()
return replica_ctx.all_reduce(combiner, tensors, options=comm_options)
else:
import horovod.tensorflow as hvd
return [hvd.allreduce(tensor) for tensor in tensors]
def all_gather(tensors, axis=0, comm_options=None):
if tf.distribute.has_strategy():
replica_ctx = tf.distribute.get_replica_context()
return replica_ctx.all_gather(tensors, axis=axis, options=comm_options)
else:
import horovod.tensorflow as hvd
return [hvd.allgather(tensor) for tensor in tensors]
def apply_gradients(optimizer, variables, grads, using_sok, aggregate_gradients=False):
if using_sok:
with sok.OptimizerScope(variables):
optimizer.apply_gradients(zip(grads, variables), experimental_aggregate_gradients=False)
else:
optimizer.apply_gradients(
zip(grads, variables), experimental_aggregate_gradients=aggregate_gradients
)
def broadcast_variables(variables):
if tf.distribute.has_strategy():
return
else:
import horovod.tensorflow as hvd
hvd.broadcast_variables(variables, root_rank=0)
def show_logs(logs, strategy, elapsed_time, steps_sec, metrics_threshold, stopper):
for key, value in logs.items():
if hasattr(value, "values"):
logs[key] = value.values[0]
if hasattr(value, "numpy"):
logs[key] = value.numpy()
def no_print():
return
def print_logs():
print("-" * 23, logs["global_step"], "-" * 23)
del logs["global_step"]
for key, value in logs.items():
print(f"{key}: {logs[key]}")
print("elapsed_time:", elapsed_time)
print("steps/sec:", steps_sec)
print("-" * 50)
if isinstance(strategy, NullStrategy):
import horovod.tensorflow as hvd
if hvd.local_rank() != 0:
no_print()
else:
print_logs()
elif os.getenv("OMPI_COMM_WORLD_RANK"):
rank = os.getenv("OMPI_COMM_WORLD_RANK")
if int(rank) != 0:
no_print()
else:
print_logs()
else:
print_logs()
for key, value in metrics_threshold.items():
if logs[key] >= value:
stopper.set_stop(
f"Metric {key}: {logs[key]} meets its " f"threshold {value}, stop training."
)
break | null |
291 | # Downloaded from https://github.com/HazyResearch/state-spaces/blob/06dbbdfd0876501a7f12bf3262121badbc7658af/src/models/functional/toeplitz.py
""" Utilities for computing convolutions.
There are 3 equivalent views:
1. causal convolution
2. multiplication of (lower) triangular Toeplitz matrices
3. polynomial multiplication (mod x^N)
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
def construct_toeplitz(v, f=0.0):
"""Explicit construction of Krylov matrix [v A @ v A^2 @ v ... A^{n-1} @ v]
where A = Z_f. This uses vectorized indexing and cumprod so it's much
faster than using the Krylov function.
Parameters:
v: the starting vector of size n or (rank, n).
f: real number
Returns:
K: Krylov matrix of size (n, n) or (rank, n, n).
"""
n = v.shape[-1]
a = torch.arange(n, device=v.device)
b = -a
indices = a[:, None] + b[None]
K = v[..., indices]
K[..., indices < 0] *= f
return K
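# Worked example (illustrative): for v = [1, 2, 3] and f = 0,
# construct_toeplitz(v) is the lower-triangular Toeplitz matrix
#   [[1, 0, 0],
#    [2, 1, 0],
#    [3, 2, 1]]
# i.e. column j holds Z_0^j @ v, the shift operator applied j times.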
def triangular_toeplitz_multiply_(u, v, sum=None):
n = u.shape[-1]
u_expand = F.pad(u, (0, n))
v_expand = F.pad(v, (0, n))
u_f = torch.fft.rfft(u_expand, n=2*n, dim=-1)
v_f = torch.fft.rfft(v_expand, n=2*n, dim=-1)
uv_f = u_f * v_f
if sum is not None:
uv_f = uv_f.sum(dim=sum)
output = torch.fft.irfft(uv_f, n=2*n, dim=-1)[..., :n]
return output
def triangular_toeplitz_multiply_padded_(u, v):
""" Same as triangular_toeplitz_multiply but inputs and output assume to be 0-padded already. """
n = u.shape[-1]
assert n % 2 == 0
u_f = torch.fft.rfft(u, n=n, dim=-1)
v_f = torch.fft.rfft(v, n=n, dim=-1)
uv_f = u_f * v_f
output = torch.fft.irfft(uv_f, n=n, dim=-1)
    # Zero the padding half; the original `output[..., n:] = 0` was a no-op,
    # since irfft with n=n already returns length n (cf. the Fast variant's
    # `output[..., n//2:].zero_()`).
    output[..., n//2:] = 0
return output
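# Quick illustration of the padding convention (a sketch, not from the source):
# length-m operands are zero-padded to 2*m before calling the padded multiply,
# and only the first m outputs carry the causal-convolution result.
def _padded_demo(m=4):
    u, v = torch.randn(m), torch.randn(m)
    out = triangular_toeplitz_multiply_padded_(F.pad(u, (0, m)), F.pad(v, (0, m)))
    assert torch.allclose(out[..., :m], triangular_toeplitz_multiply_(u, v), atol=1e-5)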
class TriangularToeplitzMult(torch.autograd.Function):
@staticmethod
def forward(ctx, u, v):
ctx.save_for_backward(u, v)
return triangular_toeplitz_multiply_(u, v)
@staticmethod
def backward(ctx, grad):
u, v = ctx.saved_tensors
d_u = triangular_toeplitz_multiply_(grad.flip(-1), v).flip(-1)
d_v = triangular_toeplitz_multiply_(grad.flip(-1), u).flip(-1)
return d_u, d_v
class TriangularToeplitzMultFast(torch.autograd.Function):
@staticmethod
def forward(ctx, u, v):
n = u.shape[-1]
u_expand = F.pad(u, (0, n))
v_expand = F.pad(v, (0, n))
u_f = torch.fft.rfft(u_expand, n=2*n, dim=-1)
v_f = torch.fft.rfft(v_expand, n=2*n, dim=-1)
ctx.save_for_backward(u_f, v_f)
uv_f = u_f * v_f
output = torch.fft.irfft(uv_f, n=2*n, dim=-1)[..., :n]
return output
@staticmethod
def backward(ctx, grad):
u_f, v_f = ctx.saved_tensors
n = grad.shape[-1]
g_expand = F.pad(grad.flip(-1), (0, n))
g_f = torch.fft.rfft(g_expand, n=2*n, dim=-1)
gu_f = g_f * u_f
gv_f = g_f * v_f
d_u = torch.fft.irfft(gv_f, n=2*n, dim=-1)[..., :n]
d_v = torch.fft.irfft(gu_f, n=2*n, dim=-1)[..., :n]
d_u = d_u.flip(-1)
d_v = d_v.flip(-1)
return d_u, d_v
class TriangularToeplitzMultPadded(torch.autograd.Function):
@staticmethod
def forward(ctx, u, v):
ctx.save_for_backward(u, v)
output = triangular_toeplitz_multiply_(u, v)
return output
@staticmethod
def backward(ctx, grad):
u, v = ctx.saved_tensors
d_u = triangular_toeplitz_multiply_padded_(grad.flip(-1), v).flip(-1)
d_v = triangular_toeplitz_multiply_padded_(grad.flip(-1), u).flip(-1)
return d_u, d_v
class TriangularToeplitzMultPaddedFast(torch.autograd.Function):
""" Trade off speed (20-25% faster) for more memory (20-25%) """
@staticmethod
def forward(ctx, u, v):
n = u.shape[-1]
u_f = torch.fft.rfft(u, n=n, dim=-1)
v_f = torch.fft.rfft(v, n=n, dim=-1)
ctx.save_for_backward(u_f, v_f)
uv_f = u_f * v_f
output = torch.fft.irfft(uv_f, n=n, dim=-1)
output[..., n//2:].zero_()
return output
@staticmethod
def backward(ctx, grad):
u_f, v_f = ctx.saved_tensors
n = grad.shape[-1]
g_expand = F.pad(grad[..., :n//2].flip(-1), (0, n//2))
g_f = torch.fft.rfft(g_expand, n=n, dim=-1)
gu_f = g_f * u_f
gv_f = g_f * v_f
d_u = torch.fft.irfft(gv_f, n=n, dim=-1)
d_v = torch.fft.irfft(gu_f, n=n, dim=-1)
d_u[..., n//2:].zero_()
d_v[..., n//2:].zero_()
d_u[..., :n//2] = d_u[..., :n//2].flip(-1) # TODO
d_v[..., :n//2] = d_v[..., :n//2].flip(-1) # TODO
return d_u, d_v
# triangular_toeplitz_multiply = triangular_toeplitz_multiply_
triangular_toeplitz_multiply = TriangularToeplitzMult.apply
triangular_toeplitz_multiply_fast = TriangularToeplitzMultFast.apply
triangular_toeplitz_multiply_padded = TriangularToeplitzMultPadded.apply
triangular_toeplitz_multiply_padded_fast = TriangularToeplitzMultPaddedFast.apply
def METHOD_NAME(u, v, fast=True, pad=False):
if not pad and not fast:
return triangular_toeplitz_multiply(u, v)
if not pad and fast:
return triangular_toeplitz_multiply_fast(u, v)
if pad and not fast:
return triangular_toeplitz_multiply_padded(u, v)
if pad and fast:
return triangular_toeplitz_multiply_padded_fast(u, v) | null |
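# Minimal sanity check (illustrative, not part of the original file): the
# explicit Krylov/Toeplitz matrix and the FFT-based autograd multiplies should
# agree on random inputs.
def _self_test(n=8):
    torch.manual_seed(0)
    u, v = torch.randn(n), torch.randn(n)
    ref = construct_toeplitz(v) @ u  # causal convolution via explicit matmul
    assert torch.allclose(ref, triangular_toeplitz_multiply(u, v), atol=1e-5)
    assert torch.allclose(ref, triangular_toeplitz_multiply_fast(u, v), atol=1e-5)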
292 | """
Adapted from a code editor component created
for Enki editor as replacement for QScintilla.
Copyright (C) 2020 Andrei Kopats
Originally licensed under the terms of GNU Lesser General Public License
as published by the Free Software Foundation, version 2.1 of the license.
This is compatible with Orange3's GPL-3.0 license.
""" # pylint: disable=duplicate-code
import unittest
from AnyQt.QtCore import Qt
from AnyQt.QtTest import QTest
from Orange.widgets.data.utils.pythoneditor.tests.base import EditorTest
class Test(EditorTest):
def test_1(self):
# Indent with Tab
self.qpart.indentUseTabs = True
self.qpart.text = 'ab\ncd'
QTest.keyClick(self.qpart, Qt.Key_Down)
QTest.keyClick(self.qpart, Qt.Key_Tab)
self.assertEqual(self.qpart.text, 'ab\n\tcd')
self.qpart.indentUseTabs = False
QTest.keyClick(self.qpart, Qt.Key_Backspace)
QTest.keyClick(self.qpart, Qt.Key_Tab)
self.assertEqual(self.qpart.text, 'ab\n cd')
def test_2(self):
# Unindent Tab
self.qpart.indentUseTabs = True
self.qpart.text = 'ab\n\t\tcd'
self.qpart.cursorPosition = (1, 2)
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, 'ab\n\tcd')
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, 'ab\ncd')
def test_3(self):
# Unindent Spaces
self.qpart.indentUseTabs = False
self.qpart.text = 'ab\n cd'
self.qpart.cursorPosition = (1, 6)
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, 'ab\n cd')
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, 'ab\ncd')
def test_4(self):
# (Un)indent multiline with Tab
self.qpart.indentUseTabs = False
self.qpart.text = ' ab\n cd'
self.qpart.selectedPosition = ((0, 2), (1, 3))
QTest.keyClick(self.qpart, Qt.Key_Tab)
self.assertEqual(self.qpart.text, ' ab\n cd')
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, ' ab\n cd')
def test_4b(self):
# Indent multiline including line with zero selection
self.qpart.indentUseTabs = True
self.qpart.text = 'ab\ncd\nef'
self.qpart.position = (0, 0)
QTest.keyClick(self.qpart, Qt.Key_Down, Qt.ShiftModifier)
QTest.keyClick(self.qpart, Qt.Key_Tab)
self.assertEqual(self.qpart.text, '\tab\ncd\nef')
    @unittest.skip  # Phantom crashes happen when running multiple tests. TODO: find why.
def METHOD_NAME(self):
# (Un)indent multiline with Space
self.qpart.indentUseTabs = False
self.qpart.text = ' ab\n cd'
self.qpart.selectedPosition = ((0, 2), (1, 3))
QTest.keyClick(self.qpart, Qt.Key_Space, Qt.ShiftModifier | Qt.ControlModifier)
self.assertEqual(self.qpart.text, ' ab\n cd')
QTest.keyClick(self.qpart, Qt.Key_Backspace, Qt.ShiftModifier | Qt.ControlModifier)
self.assertEqual(self.qpart.text, ' ab\n cd')
def test_6(self):
        # Unindent Tab/Space mix
self.qpart.indentUseTabs = False
self.qpart.text = ' \t \tab'
        self.qpart.cursorPosition = (0, 8)
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, ' \t ab')
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, ' \tab')
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, ' ab')
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, 'ab')
self.qpart.decreaseIndentAction.trigger()
self.assertEqual(self.qpart.text, 'ab')
def test_7(self):
"""Smartly indent python"""
QTest.keyClicks(self.qpart, "def main():")
QTest.keyClick(self.qpart, Qt.Key_Enter)
self.assertEqual(self.qpart.cursorPosition, (1, 4))
QTest.keyClicks(self.qpart, "return 7")
QTest.keyClick(self.qpart, Qt.Key_Enter)
self.assertEqual(self.qpart.cursorPosition, (2, 0))
if __name__ == '__main__':
unittest.main() | null |
293 | ##########################################################################
#
# Copyright (c) 2008-2009, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import math
import imath
import IECore
class LineSegmentTest( unittest.TestCase ) :
def testConstructor( self ) :
l = IECore.LineSegment3f()
p0 = imath.V3f( 1, 2, 3 )
p1 = imath.V3f( 4, 5, 6 )
l = IECore.LineSegment3f( p0, p1 )
self.assertEqual( l.p0, p0 )
self.assertEqual( l.p1, p1 )
def testPointAccess( self ) :
l = IECore.LineSegment3f()
l.p0 = imath.V3f( 1, 2, 3 )
self.assertEqual( l.p0, imath.V3f( 1, 2, 3 ) )
l.p1 = imath.V3f( 4, 5, 6 )
self.assertEqual( l.p1, imath.V3f( 4, 5, 6 ) )
def METHOD_NAME( self ) :
l = IECore.LineSegment3f( imath.V3f( 0 ), imath.V3f( 1 ) )
self.assertEqual( l( 0 ), imath.V3f( 0 ) )
self.assertEqual( l( 1 ), imath.V3f( 1 ) )
self.assertEqual( l( 0.5 ), imath.V3f( 0.5 ) )
self.assertEqual( l( -1 ), imath.V3f( -1 ) )
self.assertEqual( l( 2 ), imath.V3f( 2 ) )
def testLength( self ) :
l = IECore.LineSegment3f( imath.V3f( 1 ), imath.V3f( 2 ) )
self.assertEqual( l.length(), imath.V3f( 1 ).length() )
self.assertEqual( l.length2(), imath.V3f( 1 ).length2() )
def testClosestPointTo( self ) :
l = IECore.LineSegment3f( imath.V3f( 1 ), imath.V3f( 2 ) )
r = imath.Rand32( 100 )
for i in range( 0, 1000 ) :
p = l( r.nextf( 0, 1 ) )
self.assertTrue( l.closestPointTo( p ).equalWithAbsError( p, 0.00001 ) )
for i in range( 0, 1000 ) :
p = l( r.nextf( -1, 0 ) )
self.assertTrue( l.closestPointTo( p ).equalWithAbsError( l.p0, 0.00001 ) )
for i in range( 0, 1000 ) :
p = l( r.nextf( 1, 2 ) )
self.assertTrue( l.closestPointTo( p ).equalWithAbsError( l.p1, 0.00001 ) )
t = l.direction().cross( imath.V3f( 0, 1, 0 ) )
for i in range( 0, 1000 ) :
pl = l( r.nextf( 0, 1 ) )
pt = pl + t * r.nextf( -10, 10 )
self.assertTrue( l.closestPointTo( pt ).equalWithAbsError( pl, 0.00001 ) )
for i in range( 0, 1000 ) :
pl = l( r.nextf( 1, 2 ) )
pt = pl + t * r.nextf( -10, 10 )
self.assertTrue( l.closestPointTo( pt ).equalWithAbsError( l.p1, 0.00001 ) )
for i in range( 0, 1000 ) :
pl = l( r.nextf( -1, 0 ) )
pt = pl + t * r.nextf( -10, 10 )
self.assertTrue( l.closestPointTo( pt ).equalWithAbsError( l.p0, 0.00001 ) )
def testClosestPoints( self ) :
r = imath.Rand32( 100 )
for i in range( 0, 1000 ) :
x = r.nextf( -10, 10 )
y = r.nextf( -10, 10 )
z1 = r.nextf( -10, 10 )
z2 = r.nextf( -10, 10 )
l1 = IECore.LineSegment3f( imath.V3f( -10, y, z1 ), imath.V3f( 10, y, z1 ) )
l2 = IECore.LineSegment3f( imath.V3f( x, -10, z2 ), imath.V3f( x, 10, z2 ) )
p1, p2 = l1.closestPoints( l2 )
p3, p4 = l2.closestPoints( l1 )
self.assertTrue( p1.equalWithAbsError( p4, 0.00001 ) )
self.assertTrue( p2.equalWithAbsError( p3, 0.00001 ) )
# |
# |
# | ------
# |
# |
l1 = IECore.LineSegment3f( imath.V3f( 0, 0, 0 ), imath.V3f( 0, 2, 0 ) )
l2 = IECore.LineSegment3f( imath.V3f( 1, 1, 0 ), imath.V3f( 3, 1, 0 ) )
p1, p2 = l1.closestPoints( l2 )
p3, p4 = l2.closestPoints( l1 )
self.assertEqual( p1, p4 )
self.assertEqual( p2, p3 )
self.assertEqual( p1, imath.V3f( 0, 1, 0 ) )
self.assertEqual( p2, imath.V3f( 1, 1, 0 ) )
# \
# \
#
# /
# /
l1 = IECore.LineSegment3f( imath.V3f( 0, 0, 0 ), imath.V3f( 2, 2, 0 ) )
l2 = IECore.LineSegment3f( imath.V3f( 0, 5, 0 ), imath.V3f( 2, 3, 0 ) )
p1, p2 = l1.closestPoints( l2 )
p3, p4 = l2.closestPoints( l1 )
self.assertEqual( p1, p4 )
self.assertEqual( p2, p3 )
self.assertEqual( p1, imath.V3f( 2, 2, 0 ) )
self.assertEqual( p2, imath.V3f( 2, 3, 0 ) )
def testTransform( self ) :
l1 = IECore.LineSegment3f( imath.V3f( 0, 0, 0 ), imath.V3f( 0, 2, 0 ) )
l2 = IECore.LineSegment3f( l1 )
self.assertEqual( l1, l2 )
t = imath.M44f().translate( imath.V3f( 1 ) )
l3 = l2 * t
self.assertEqual( l1, l2 )
self.assertEqual( l3.p0, l2.p0 + imath.V3f( 1 ) )
self.assertEqual( l3.p1, l2.p1 + imath.V3f( 1 ) )
l1 *= t
self.assertEqual( l1.p0, l2.p0 + imath.V3f( 1 ) )
self.assertEqual( l1.p1, l2.p1 + imath.V3f( 1 ) )
def testIntersect( self ) :
l = IECore.LineSegment3f( imath.V3f( 0, -1, 0 ), imath.V3f( 0, 1, 0 ) )
p = imath.Plane3f( imath.V3f( 0, 1, 0 ), 0 )
self.assertEqual( l.intersect( p ), ( True, imath.V3f( 0, 0, 0 ) ) )
self.assertEqual( l.intersectT( p ), ( True, 0.5 ) )
p = imath.Plane3f( imath.V3f( -1, 0, 0 ), 10 )
self.assertEqual( l.intersect( p )[0], False )
self.assertEqual( l.intersectT( p )[0], False )
def testRepr( self ) :
p0 = imath.V3f( 0, 0, 0 )
p1 = imath.V3f( 0, 0, 0 )
l = IECore.LineSegment3f( p0, p1 )
self.assertEqual( repr(l), "IECore.LineSegment3f( " + repr(p0) + ", " + repr(p1) + " )" )
def testDimensions( self ) :
self.assertEqual( IECore.LineSegment3f.dimensions(), 3 )
self.assertEqual( IECore.LineSegment3d.dimensions(), 3 )
if __name__ == "__main__":
unittest.main() | null |
294 | # Copyright 2019,2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nnabla as nn
import nnabla.parametric_functions as PF
import nnabla.functions as F
from nnabla.logger import logger
import numpy as np
endpoints = {}
def block(x, sh, num_units, stride, end_point, depth_list, sh_cut, shortcut_channels, act_fn=True, atrous_conv=False, atrous_rate=1, res=False, last_block=False, test=False, fix_params=False):
for i in range(0, num_units):
with nn.parameter_scope("unit_"+str(i+1)):
x = unit(x, depth_list, stride, end_point, act_fn=act_fn, atrous_conv=atrous_conv,
atrous_rate=atrous_rate, last_block=last_block, test=test, fix_params=fix_params)
if sh_cut == True:
sh = shortcut(sh, shortcut_channels, not atrous_conv,
test=test, fix_params=fix_params)
x = x+sh
if res == True:
x = x+sh
sh = x
return x
def unit(x, depth_list, stride, end_point, act_fn=True, atrous_conv=False, atrous_rate=1, last_block=False, test=False, fix_params=False):
if last_block == False:
x = F.relu(x)
for i in range(0, len(depth_list)):
if i == 2:
act_fn = False
with nn.parameter_scope("separable_conv"+str(i+1)):
if end_point == True and i == 1:
x = separable_conv_with_bn(x, depth_list[i], stride=False, aspp=False, atrous_rate=atrous_rate,
act_fn=act_fn, last_block=last_block, end_point=end_point, test=test, fix_params=fix_params)
else:
if stride == True and i == 2:
x = separable_conv_with_bn(x, depth_list[i], stride=True, aspp=atrous_conv, atrous_rate=atrous_rate,
act_fn=act_fn, last_block=last_block, test=test, fix_params=fix_params)
else:
x = separable_conv_with_bn(x, depth_list[i], stride=False, aspp=atrous_conv, atrous_rate=atrous_rate,
act_fn=act_fn, last_block=last_block, test=test, fix_params=fix_params)
return x
def shortcut(x, f, stride=True, test=False, fix_params=False):
with nn.parameter_scope("shortcut"):
if (stride == False):
h = PF.convolution(x, f, (1, 1), with_bias=False,
fix_parameters=fix_params)
else:
h = PF.convolution(x, f, (1, 1), stride=(
2, 2), with_bias=False, fix_parameters=fix_params)
h = PF.batch_normalization(
h, batch_stat=not test, eps=1e-03, fix_parameters=fix_params)
return h
def separable_conv_with_bn(x, f, stride=False, aspp=False, atrous_rate=1, act_fn=True, last_block=False, end_point=False, eps=1e-03, out=False, test=False, fix_params=False):
with nn.parameter_scope("depthwise"):
if (stride == True):
h = PF.depthwise_convolution(x, (3, 3), stride=(2, 2), pad=(
1, 1), with_bias=False, fix_parameters=fix_params)
elif (aspp == True):
h = PF.depthwise_convolution(x, (3, 3), pad=(atrous_rate, atrous_rate), stride=(
1, 1), dilation=(atrous_rate, atrous_rate), with_bias=False, fix_parameters=fix_params)
else:
h = PF.depthwise_convolution(x, (3, 3), pad=(
1, 1), with_bias=False, fix_parameters=fix_params)
h = PF.batch_normalization(
h, batch_stat=not test, eps=eps, fix_parameters=fix_params)
if last_block == True:
h = F.relu(h)
with nn.parameter_scope("pointwise"):
h = PF.convolution(h, f, (1, 1), stride=(
1, 1), with_bias=False, fix_parameters=fix_params)
h = PF.batch_normalization(
h, batch_stat=not test, eps=eps, fix_parameters=fix_params)
if end_point == True:
global endpoints
endpoints['Decoder End Point 1'] = h
if act_fn == True:
h = F.relu(h)
return h
def METHOD_NAME(x, num_blocks, depth_list, test=False, fix_params=False):
shortcut_channels = [128, 256, 728]
global endpoints
with nn.parameter_scope("1"):
h = PF.convolution(x, 32, (3, 3), pad=(1, 1), stride=(
2, 2), with_bias=False, fix_parameters=fix_params)
h = F.relu(PF.batch_normalization(h, batch_stat=not test,
eps=1e-03, fix_parameters=fix_params))
with nn.parameter_scope("2"):
h = PF.convolution(h, 64, (3, 3), pad=(
1, 1), with_bias=False, fix_parameters=fix_params)
h = F.relu(PF.batch_normalization(h, batch_stat=not test,
eps=1e-03, fix_parameters=fix_params))
x = h
sh = x
for i in range(0, num_blocks):
with nn.parameter_scope("block"+str(i+1)):
if i == 1:
x = block(x, sh, 1, True, True,
depth_list[i], True, shortcut_channels[i], test=test)
else:
x = block(x, sh, 1, True, False,
depth_list[i], True, shortcut_channels[i], test=test)
sh = x
if i == 2:
endpoints['conv1'] = x
return x
def middle_flow(x, num_blocks, depth_list, test=False, fix_params=False):
shortcut_channels = [0]
sh = x
for i in range(0, num_blocks):
with nn.parameter_scope("block"+str(i+1)):
x = block(x, sh, 16, False, False,
depth_list[i], False, shortcut_channels, res=True, test=test)
sh = x
return x
def exit_flow(x, num_blocks, depth_list, test=False, fix_params=False):
shortcut_channels = [1024, 0]
sh = x
for i in range(0, num_blocks):
with nn.parameter_scope("block"+str(i+1)):
if i == 0:
x = block(x, sh, 1, False, False,
depth_list[i], True, shortcut_channels[i], atrous_conv=True, test=test)
else:
x = block(x, sh, 1, False, False, depth_list[i], False, shortcut_channels[i],
atrous_conv=True, atrous_rate=2, last_block=True, test=test)
sh = x
return x
def xception_65(x, test=False, fix_params=False):
entry_flow_depth_list = [[128, 128, 128], [256, 256, 256], [728, 728, 728]]
middle_flow_depth_list = [[728, 728, 728]]
exit_flow_depth_list = [[728, 1024, 1024], [1536, 1536, 2048]]
with nn.parameter_scope("xception_65"):
with nn.parameter_scope("entry_flow"):
x = METHOD_NAME(x, 3, entry_flow_depth_list,
test=test, fix_params=fix_params)
with nn.parameter_scope("middle_flow"):
x = middle_flow(x, 1, middle_flow_depth_list,
test=test, fix_params=fix_params)
with nn.parameter_scope("exit_flow"):
x = exit_flow(x, 2, exit_flow_depth_list,
test=test, fix_params=fix_params)
x = F.relu(x)
global endpoints
endpoints['Decoder End Point 2'] = x
return endpoints | null |
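# Minimal usage sketch (input shape assumed, not from the original file):
# build the backbone on a dummy NCHW batch and inspect the decoder endpoints.
def _xception_demo():
    x = nn.Variable((1, 3, 513, 513))
    eps = xception_65(x, test=True)
    print(eps['Decoder End Point 1'].shape, eps['Decoder End Point 2'].shape)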
295 | # coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""irc_disentanglement dataset."""
import collections
import os
from typing import List
from tensorflow_datasets.core.utils.lazy_imports_utils import tensorflow as tf
import tensorflow_datasets.public_api as tfds
_DOWNLOAD_URL = (
"https://github.com/jkkummerfeld/irc-disentanglement/zipball/fd379e9"
)
_DOWNLOAD_ARCHIVE_SUBDIR = os.path.join(
"jkkummerfeld-irc-disentanglement-fd379e9", "data"
)
_IRC_DAY_KEY = "day"
_MESSAGE_ID = "id"
_MESSAGE_TEXT = "text"
_MESSAGE_TIMESTAMP = "timestamp"
_MESSAGE_PARENTS_IDS = "parents"
def _get_day_to_paths(data_dir):
"""Prepares paths to files with raw chat messages and replies annotations.
Args:
data_dir: directory containing files with data. directory can be
Returns:
day_to_paths: dict formatted date -> dict with paths
day_to_paths[day_str]["text"] - path to file with raw chat messages
day_to_paths[day_str]["annot"] - path to file with replies annotations.
"""
day_to_paths = collections.defaultdict(dict)
for filename in tf.io.gfile.listdir(data_dir):
filepath = os.path.join(data_dir, filename)
day_str = filename[: len("YYYY-MM-DD")] # e.g. 2004-12-25.train-c.raw.txt
if "raw" in filename:
day_to_paths[day_str]["text"] = filepath
if "annotation" in filename:
day_to_paths[day_str]["annot"] = filepath
return day_to_paths
def _read_texts_file(path):
with tf.io.gfile.GFile(path, "r") as f:
return [line.strip() for line in f]
def _read_annot_file(path):
"""Reads file with replies annotation."""
with tf.io.gfile.GFile(path, "r") as f:
return [(int(first), int(second)) for first, second, _ in map(str.split, f)]
def _parse_out_timestamps(raw_texts, day_str):
"""Parsing timestamps from IRC chat messages.
Similar logic is implemented here.
https://github.com/jkkummerfeld/irc-disentanglement/blob/master/src/disentangle.py#L174
Args:
raw_texts: list of raw chat messages.
day_str: formatted date string.
Returns:
texts: list of texts without timestamps.
timestamps: list of formatted timestamps
"""
prev_hours = 0
timestamps, texts = [], []
for raw_text in raw_texts:
    if raw_text.startswith("["):  # Regular messages e.g. "[04:13]<xxx>: Hi!"
hours = int(raw_text[1:3])
mins = int(raw_text[4:6])
# 12h format -> 24h format
if hours < prev_hours: # All messages belong to the same day and are
hours += 12 # chronologically ordered, but AM/PM info is absent
prev_hours = hours
timestamps.append("{}_{:02}_{:02}".format(day_str, hours, mins))
raw_text = raw_text[7:]
    else:  # System messages e.g. "=== xxx has joined #ubuntu"
timestamps.append("")
texts.append(raw_text)
return texts, timestamps
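# Illustrative check of the parsing rules above (not from the original file):
# a regular "[HH:MM]" message is split into text + formatted timestamp, while
# a system message keeps its text and gets an empty timestamp.
def _parse_demo():
  texts, stamps = _parse_out_timestamps(
      ["[04:13]<ann> hi", "=== bob has joined #ubuntu"], "2004-12-25")
  assert texts == ["<ann> hi", "=== bob has joined #ubuntu"]
  assert stamps == ["2004-12-25_04_13", ""]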
def _get_msg_id(day, line_num):
return "{}_{:05}".format(day, line_num)
def METHOD_NAME(texts_file_path, annot_file_path, day_str):
"""Prepares examples for 1 day."""
# Read raw data
raw_texts = _read_texts_file(texts_file_path)
annotations = _read_annot_file(annot_file_path)
# Construct replies graph
idx_to_parents = {idx: [] for idx in range(len(raw_texts))}
for parent_msg_idx, msg_idx in annotations:
idx_to_parents[msg_idx].append(parent_msg_idx)
texts, timestamps = _parse_out_timestamps(raw_texts, day_str)
for line_idx, parents in idx_to_parents.items():
parents_ids = [_get_msg_id(day_str, parent) for parent in parents]
yield {
_MESSAGE_ID: _get_msg_id(day_str, line_idx),
_MESSAGE_TEXT: texts[line_idx],
_MESSAGE_TIMESTAMP: timestamps[line_idx],
_MESSAGE_PARENTS_IDS: parents_ids,
}
class Builder(tfds.core.GeneratorBasedBuilder):
"""IRC Disentanglement dataset."""
VERSION = tfds.core.Version("2.0.0")
def _info(self) -> tfds.core.DatasetInfo:
return self.dataset_info_from_configs(
features=tfds.features.FeaturesDict(
{
_IRC_DAY_KEY: tfds.features.Sequence(
tfds.features.FeaturesDict({
_MESSAGE_ID: tfds.features.Text(),
_MESSAGE_TEXT: tfds.features.Text(),
_MESSAGE_TIMESTAMP: tfds.features.Text(),
_MESSAGE_PARENTS_IDS: tfds.features.Sequence(
tfds.features.Text()
),
})
)
}
),
homepage="https://jkk.name/irc-disentanglement",
)
def _split_generators(
self, dl_manager: tfds.download.DownloadManager
) -> List[tfds.core.SplitGenerator]:
"""Returns SplitGenerators."""
base_dir = dl_manager.download_and_extract(_DOWNLOAD_URL)
data_dir = os.path.join(base_dir, _DOWNLOAD_ARCHIVE_SUBDIR)
return [
tfds.core.SplitGenerator(
name=tfds.Split.TRAIN,
gen_kwargs={
"day_to_paths": _get_day_to_paths(
os.path.join(data_dir, "train")
)
},
),
tfds.core.SplitGenerator(
name=tfds.Split.VALIDATION,
gen_kwargs={
"day_to_paths": _get_day_to_paths(os.path.join(data_dir, "dev"))
},
),
tfds.core.SplitGenerator(
name=tfds.Split.TEST,
gen_kwargs={
"day_to_paths": _get_day_to_paths(
os.path.join(data_dir, "test")
)
},
),
]
def _generate_examples(self, day_to_paths):
"""Yields examples."""
for day, paths in day_to_paths.items():
yield day, {
_IRC_DAY_KEY: list(
METHOD_NAME(paths["text"], paths["annot"], day)
)
} | null |
296 | """osf/management/commands/metrics_backfill_user_domains.py
Usage:
$ dc-manage metrics_backfill_user_domains --source=$path_to_csv
$ dc-manage metrics_backfill_user_domains --source=$path_to_csv --dry # dry run
$ dc-manage metrics_backfill_user_domains --source=$path_to_csv --resume-from 1264 # start from record 1264
"""
import csv
import logging
import datetime
from django.core.management.base import BaseCommand
from osf.metrics import NewUserDomainReport
logger = logging.getLogger(__name__)
def main(source, dry_run=False, resume_from=None):
if not source:
logger.info('No source file detected, exiting.')
return
    # The new-user-domains report is unusual: old data must be aggregated by date & domain.
count = 0
reader = csv.DictReader(source)
tally = {}
this_year = None
for row in reader:
count += 1
if resume_from is not None and count < resume_from:
continue
logger.info('count:({}) this_year:({})'.format(count, this_year))
event_ts = _timestamp_to_dt(row['keen.timestamp'])
event_date = event_ts.date()
event_date_str = str(event_date)
if this_year is None:
logger.info(' >>> setting new year')
this_year = event_date.year
if this_year != event_date.year:
# we've built up a year of data; commit and clear
logger.info(' >>> year is up, committing data')
_upload_data_and_purge(tally, dry_run)
this_year = event_date.year
logger.info(' >>> data committed, new year is:({}) and tally should be '
'empty:({})'.format(this_year, tally))
if event_date_str not in tally:
tally[event_date_str] = {
'timestamp': event_ts,
'report_date': event_date,
'domains': {},
}
domain = row['domain']
if domain not in tally[event_date_str]['domains']:
tally[event_date_str]['domains'][domain] = 0
tally[event_date_str]['domains'][domain] += 1
_upload_data_and_purge(tally, dry_run)
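# Shape of the intermediate tally (illustrative values, not from real data):
#   {'2022-12-30': {'timestamp': <datetime>, 'report_date': <date>,
#                   'domains': {'gmail.com': 7, 'example.edu': 2}}}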
def _upload_data_and_purge(tally, dry_run):
for event_date_str, record in tally.items():
for domain, count in record['domains'].items():
# date(keen.timestamp) => _source.report_date # "2022-12-30",
# keen.created_at => _source.timestamp # "2023-01-02T14:59:05.684642+00:00"
# domain => _source.domain_name # metrics.Keyword()
# count_agg(domain) => _source.new_user_count # metrics.Integer()
something_wonderful = {
'timestamp': record['timestamp'],
'report_date': record['report_date'],
'domain_name': domain,
'new_user_count': count,
}
logger.info(' *** {}::{}::{}'.format(event_date_str, domain, count))
logger.info(' *** {}::{}: something wonderful:({})'.format(event_date_str, domain,
something_wonderful))
if not dry_run:
NewUserDomainReport.record(**something_wonderful)
# purge tally
tally.clear()
def _timestamp_to_dt(timestamp):
return datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=datetime.timezone.utc)
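# Example (illustrative): keen timestamps are naive-UTC ISO strings, e.g.
#   _timestamp_to_dt('2022-12-30T14:59:05.684642Z')
#   == datetime.datetime(2022, 12, 30, 14, 59, 5, 684642,
#                        tzinfo=datetime.timezone.utc)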
def _timestamp_to_date(timestamp):
dt_obj = _timestamp_to_dt(timestamp)
return str(dt_obj.date())
class Command(BaseCommand):
def METHOD_NAME(self, parser):
super(Command, self).METHOD_NAME(parser)
parser.add_argument(
'--source',
type=open,
help='source file (csv format w/ header line)',
)
parser.add_argument(
'--dry',
dest='dry',
action='store_true',
help='Dry run'
)
parser.add_argument(
'--resume-from',
dest='resume_from',
type=int,
help='start from which record',
)
def handle(self, *args, **options):
dry_run = options.get('dry', None)
source = options.get('source', None)
resume_from = options.get('resume_from', None)
main(source, dry_run, resume_from) | null |
297 | import asyncio
import functools
from decimal import Decimal
from typing import Awaitable, Callable, Optional
from unittest import TestCase
from unittest.mock import AsyncMock
from hummingbot.client.config.client_config_map import ClientConfigMap
from hummingbot.client.config.config_helpers import ClientConfigAdapter
from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_exchange import CoinbaseProExchange
from hummingbot.core.data_type.common import OrderType, TradeType
from hummingbot.core.event.event_logger import EventLogger
from hummingbot.core.event.events import MarketEvent, OrderFilledEvent
class CoinbaseProExchangeTests(TestCase):
# the level is required to receive logs from the data source logger
level = 0
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
cls.base_asset = "COINALPHA"
cls.quote_asset = "HBOT"
cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}"
cls.symbol = f"{cls.base_asset}{cls.quote_asset}"
cls.listen_key = "TEST_LISTEN_KEY"
def setUp(self) -> None:
super().setUp()
self.log_records = []
self.test_task: Optional[asyncio.Task] = None
self.resume_test_event = asyncio.Event()
self.client_config_map = ClientConfigAdapter(ClientConfigMap())
self.exchange = CoinbaseProExchange(
client_config_map=self.client_config_map,
coinbase_pro_api_key="testAPIKey",
coinbase_pro_secret_key="testSecret",
coinbase_pro_passphrase="testPassphrase",
trading_pairs=[self.trading_pair]
)
self.exchange.logger().setLevel(1)
self.exchange.logger().addHandler(self)
self._initialize_event_loggers()
def tearDown(self) -> None:
self.test_task and self.test_task.cancel()
super().tearDown()
def _initialize_event_loggers(self):
self.buy_order_completed_logger = EventLogger()
self.sell_order_completed_logger = EventLogger()
self.order_filled_logger = EventLogger()
events_and_loggers = [
(MarketEvent.BuyOrderCompleted, self.buy_order_completed_logger),
(MarketEvent.SellOrderCompleted, self.sell_order_completed_logger),
(MarketEvent.OrderFilled, self.order_filled_logger)]
for event, logger in events_and_loggers:
self.exchange.add_listener(event, logger)
def handle(self, record):
self.log_records.append(record)
def _is_logged(self, log_level: str, message: str) -> bool:
return any(record.levelname == log_level and record.getMessage() == message for record in self.log_records)
def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1):
ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout))
return ret
def _return_calculation_and_set_done_event(self, calculation: Callable, *args, **kwargs):
if self.resume_test_event.is_set():
raise asyncio.CancelledError
self.resume_test_event.set()
return calculation(*args, **kwargs)
def METHOD_NAME(self):
self.exchange.start_tracking_order(
order_id="OID1",
trading_pair=self.trading_pair,
order_type=OrderType.LIMIT,
trade_type=TradeType.BUY,
price=Decimal("10000"),
amount=Decimal("1"),
)
order = self.exchange.in_flight_orders.get("OID1")
order.update_exchange_order_id("EOID1")
partial_fill = {
"type": "match",
"trade_id": 1,
"sequence": 50,
"maker_order_id": "EOID1",
"taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1",
"time": "2014-11-07T08:19:27.028459Z",
"product_id": "BTC-USDT",
"size": "0.1",
"price": "10050.0",
"side": "buy",
"taker_user_id": "5844eceecf7e803e259d0365",
"user_id": "5844eceecf7e803e259d0365",
"taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
"profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
"taker_fee_rate": "0.005"
}
mock_user_stream = AsyncMock()
mock_user_stream.get.side_effect = functools.partial(self._return_calculation_and_set_done_event,
lambda: partial_fill)
self.exchange.user_stream_tracker._user_stream = mock_user_stream
self.test_task = asyncio.get_event_loop().create_task(self.exchange._user_stream_event_listener())
self.async_run_with_timeout(self.resume_test_event.wait())
expected_executed_quote_amount = Decimal(str(partial_fill["size"])) * Decimal(str(partial_fill["price"]))
expected_partial_event_fee = (Decimal(partial_fill["taker_fee_rate"]) *
expected_executed_quote_amount)
self.assertEqual(expected_partial_event_fee, order.fee_paid)
self.assertEqual(1, len(self.order_filled_logger.event_log))
fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0]
self.assertEqual(Decimal("0.005"), fill_event.trade_fee.percent)
self.assertEqual([], fill_event.trade_fee.flat_fees)
self.assertTrue(self._is_logged(
"INFO",
f"Filled {Decimal(partial_fill['size'])} out of {order.amount} of the "
f"{order.order_type_description} order {order.client_order_id}"
))
self.assertEqual(0, len(self.buy_order_completed_logger.event_log))
complete_fill = {
"type": "match",
"trade_id": 2,
"sequence": 50,
"maker_order_id": "EOID1",
"taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1",
"time": "2014-11-07T08:19:27.028459Z",
"product_id": "BTC-USDT",
"size": "0.9",
"price": "10050.0",
"side": "buy",
"taker_user_id": "5844eceecf7e803e259d0365",
"user_id": "5844eceecf7e803e259d0365",
"taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
"profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
"taker_fee_rate": "0.001"
}
self.resume_test_event = asyncio.Event()
mock_user_stream = AsyncMock()
mock_user_stream.get.side_effect = functools.partial(self._return_calculation_and_set_done_event,
lambda: complete_fill)
self.exchange.user_stream_tracker._user_stream = mock_user_stream
self.test_task = asyncio.get_event_loop().create_task(self.exchange._user_stream_event_listener())
self.async_run_with_timeout(self.resume_test_event.wait())
expected_executed_quote_amount = Decimal(str(complete_fill["size"])) * Decimal(str(complete_fill["price"]))
expected_partial_event_fee += Decimal(complete_fill["taker_fee_rate"]) * expected_executed_quote_amount
self.assertEqual(expected_partial_event_fee, order.fee_paid)
self.assertEqual(2, len(self.order_filled_logger.event_log))
fill_event: OrderFilledEvent = self.order_filled_logger.event_log[1]
self.assertEqual(Decimal("0.001"), fill_event.trade_fee.percent)
self.assertEqual([], fill_event.trade_fee.flat_fees)
# The order should be marked as complete only when the "done" event arrives, not with the fill event
self.assertFalse(self._is_logged(
"INFO",
f"The market buy order {order.client_order_id} has completed according to Coinbase Pro user stream."
))
self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) | null |
298 | # Wrapper module for waagent
#
# waagent is not written as a module. This wrapper module is created
# to use the waagent code as a module.
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import imp
import os
import os.path
#
# The following code will search and load waagent code and expose
# it as a submodule of current module
#
def searchWAAgent():
    # If the extension ships waagent in its package, default to that version first.
pkg_agent_path = os.path.join(os.getcwd(), 'waagent')
if os.path.isfile(pkg_agent_path):
return pkg_agent_path
agentPath = '/usr/sbin/waagent'
if os.path.isfile(agentPath):
return agentPath
user_paths = os.environ['PYTHONPATH'].split(os.pathsep)
for user_path in user_paths:
agentPath = os.path.join(user_path, 'waagent')
if os.path.isfile(agentPath):
return agentPath
return None
waagent = None
agentPath = searchWAAgent()
if agentPath:
waagent = imp.load_source('waagent', agentPath)
else:
raise Exception("Can't load waagent.")
if not hasattr(waagent, "AddExtensionEvent"):
"""
If AddExtensionEvent is not defined, provide a dummy impl.
"""
def _AddExtensionEvent(*args, **kwargs):
pass
waagent.AddExtensionEvent = _AddExtensionEvent
if not hasattr(waagent, "WALAEventOperation"):
class _WALAEventOperation:
HeartBeat="HeartBeat"
Provision = "Provision"
Install = "Install"
UnIsntall = "UnInstall"
Disable = "Disable"
Enable = "Enable"
Download = "Download"
Upgrade = "Upgrade"
Update = "Update"
waagent.WALAEventOperation = _WALAEventOperation
# Work around the waagent typo here, in anticipation of a proper fix in waagent itself.
if not hasattr(waagent.WALAEventOperation, 'Uninstall'):
if hasattr(waagent.WALAEventOperation, 'UnIsntall'):
waagent.WALAEventOperation.Uninstall = waagent.WALAEventOperation.UnIsntall
else: # This shouldn't happen, but just in case...
waagent.WALAEventOperation.Uninstall = 'Uninstall'
def METHOD_NAME():
"""
Get http_proxy and https_proxy from waagent config.
Username and password is not supported now.
This code is adopted from /usr/sbin/waagent
"""
host = None
port = None
try:
waagent.Config = waagent.ConfigurationProvider(None) # Use default waagent conf file (most likely /etc/waagent.conf)
host = waagent.Config.get("HttpProxy.Host")
port = waagent.Config.get("HttpProxy.Port")
except Exception as e:
# waagent.ConfigurationProvider(None) will throw an exception on an old waagent
# Has to silently swallow because logging is not yet available here
# and we don't want to bring that in here. Also if the call fails, then there's
# no proxy config in waagent.conf anyway, so it's safe to silently swallow.
pass
result = ''
if host is not None:
result = "http://" + host
if port is not None:
result += ":" + port
return result
waagent.HttpProxyConfigString = METHOD_NAME()
# end: waagent http proxy config stuff
__ExtensionName__ = None
def InitExtensionEventLog(name):
global __ExtensionName__
__ExtensionName__ = name
def AddExtensionEvent(name=None,
                      op=waagent.WALAEventOperation.Enable,
                      isSuccess=False,
                      message=None):
    # The original default `name=__ExtensionName__` was evaluated at definition
    # time (always None); resolve the name registered via InitExtensionEventLog
    # at call time instead.
    if name is None:
        name = __ExtensionName__
    if name is not None:
waagent.AddExtensionEvent(name=name,
op=op,
isSuccess=isSuccess,
message=message) | null |
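# Illustrative call sequence (hedged; names are from this module):
#   InitExtensionEventLog('MyExtension')
#   AddExtensionEvent(op=waagent.WALAEventOperation.Install,
#                     isSuccess=True, message='installed ok')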
299 | ######################################################################
# BioSimSpace: Making biomolecular simulation a breeze!
#
# Copyright: 2017-2023
#
# Authors: Lester Hedges <[email protected]>
#
# BioSimSpace is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BioSimSpace is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BioSimSpace. If not, see <http://www.gnu.org/licenses/>.
#####################################################################
"""Functionality for a production alchecmical free-energy protocol."""
__author__ = "Lester Hedges"
__email__ = "[email protected]"
__all__ = ["FreeEnergy", "FreeEnergyProduction"]
from ._free_energy_mixin import _FreeEnergyMixin
from ._production import Production as _Production
from .. import Types as _Types
from .. import Units as _Units
class FreeEnergyProduction(_Production, _FreeEnergyMixin):
"""A class for storing free energy production protocols."""
def __init__(
self,
lam=0.0,
lam_vals=None,
min_lam=0.0,
max_lam=1.0,
num_lam=11,
timestep=_Types.Time(2, "femtosecond"),
runtime=_Types.Time(4, "nanosecond"),
temperature=_Types.Temperature(300, "kelvin"),
pressure=_Types.Pressure(1, "atmosphere"),
thermostat_time_constant=_Types.Time(1, "picosecond"),
report_interval=200,
restart_interval=1000,
restart=False,
perturbation_type="full",
restraint=None,
force_constant=10 * _Units.Energy.kcal_per_mol / _Units.Area.angstrom2,
):
"""Constructor.
Parameters
----------
lam : float
The perturbation parameter: [0.0, 1.0]
lam_vals : [float]
The list of lambda parameters.
min_lam : float
The minimum lambda value.
max_lam : float
The maximum lambda value.
num_lam : int
The number of lambda values.
timestep : :class:`Time <BioSimSpace.Types.Time>`
The integration timestep.
runtime : :class:`Time <BioSimSpace.Types.Time>`
The running time.
temperature : :class:`Temperature <BioSimSpace.Types.Temperature>`
The temperature.
pressure : :class:`Pressure <BioSimSpace.Types.Pressure>`
The pressure. Pass pressure=None to use the NVT ensemble.
thermostat_time_constant : :class:`Time <BioSimSpace.Types.Time>`
Time constant for thermostat coupling.
report_interval : int
The frequency at which statistics are recorded. (In integration steps.)
        restart_interval : int
            The frequency at which restart configurations and trajectory
            frames are saved. (In integration steps.)
restart : bool
Whether this is a continuation of a previous simulation.
perturbation_type : str
The type of perturbation to perform. Options are:
"full" : A full perturbation of all terms (default option).
"discharge_soft" : Perturb all discharging soft atom charge terms (i.e. value->0.0).
"vanish_soft" : Perturb all vanishing soft atom LJ terms (i.e. value->0.0).
"flip" : Perturb all hard atom terms as well as bonds/angles.
"grow_soft" : Perturb all growing soft atom LJ terms (i.e. 0.0->value).
"charge_soft" : Perturb all charging soft atom LJ terms (i.e. 0.0->value).
            Currently perturbation_type != "full" is only supported by
BioSimSpace.Process.Somd.
restraint : str, [int]
The type of restraint to perform. This should be one of the
following options:
"backbone"
Protein backbone atoms. The matching is done by a name
template, so is unreliable on conversion between
molecular file formats.
"heavy"
All non-hydrogen atoms that aren't part of water
molecules or free ions.
"all"
All atoms that aren't part of water molecules or free
ions.
Alternatively, the user can pass a list of atom indices for
more fine-grained control. If None, then no restraints are used.
force_constant : :class:`GeneralUnit <BioSimSpace.Types._GeneralUnit>`, float
The force constant for the restraint potential. If a 'float' is
passed, then default units of 'kcal_per_mol / angstrom**2' will
be used.
"""
# Call the base class constructors.
_Production.__init__(
self,
timestep=timestep,
runtime=runtime,
temperature=temperature,
pressure=pressure,
thermostat_time_constant=thermostat_time_constant,
report_interval=report_interval,
restart_interval=restart_interval,
restart=restart,
restraint=restraint,
force_constant=force_constant,
)
_FreeEnergyMixin.__init__(
self,
lam=lam,
lam_vals=lam_vals,
min_lam=min_lam,
max_lam=max_lam,
num_lam=num_lam,
perturbation_type=perturbation_type,
)
def METHOD_NAME(self):
"""Return a string representation of the parameters."""
return ", ".join(
[_Production.METHOD_NAME(self), _FreeEnergyMixin.METHOD_NAME(self)]
)
def __str__(self):
"""Return a human readable string representation of the object."""
if self._is_customised:
return "<BioSimSpace.Protocol.Custom>"
else:
return f"<BioSimSpace.Protocol.FreeEnergyProduction: {self.METHOD_NAME()}>"
def __repr__(self):
"""Return a string showing how to instantiate the object."""
if self._is_customised:
return "BioSimSpace.Protocol.Custom"
else:
return f"BioSimSpace.Protocol.FreeEnergyProduction({self.METHOD_NAME()})"
# Alias the class for consistency with the old API.
FreeEnergy = FreeEnergyProduction | null |
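# Minimal usage sketch (illustrative; the values are assumptions, not defaults
# from any particular workflow): an 11-window production leg at lambda = 0.
def _example_protocol():
    return FreeEnergyProduction(
        lam=0.0,
        num_lam=11,
        runtime=_Types.Time(1, "nanosecond"),
    )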