id (int64, 0–6k) | code (string, 4k–8k chars) | code_compressed
---|---|---
800 | from flare import html5
from flare.observable import StateHandler
from flare.views.helpers import removeView
from vi.config import conf
class NavigationElement(html5.Div):
# language=HTML
tpl = '''
<div [name]="item" class="item has-hover">
<a class="item-link" @click="navigationAction">
<div class="item-image">
<flare-icon value="{{icon}}" title="{{name}}"></flare-icon>
</div>
<div class="item-content">
<div class="item-headline">{{name}}</div>
</div>
</a>
<span [name]="itemArrow" class="item-open is-hidden" @click="ArrowAction">
<flare-svg-icon value="icon-arrow-left"></flare-svg-icon>
</span>
<span [name]="itemRemove" class="item-pin is-hidden" @click="RemoveAction">
<flare-svg-icon value="icon-cancel"></flare-svg-icon>
</span>
</div>
<div [name]="subItem" class="list list--sub">
</div>
'''
def __init__(self,name,icon=None,view=None,nav=None,closeable=False, opened=False):
super().__init__()
self.view = view
self.nav = nav
self.closeable = closeable
self["class"] = "item-group"
#register state handler
nav.state.register("activeNavigation",self)
self.state = StateHandler( ["hasSubItems"],self )
self.state.register("hasSubItems",self)
self.appendChild(
self.tpl,
icon = icon,
name = name
)
self.state.updateState( "hasSubItems", False )
conf[ "views_state" ].register( "activeView", self )
if self.closeable:
self.itemRemove.removeClass("is-hidden")
if opened:
self.ArrowAction(None)
def METHOD_NAME( self,e,wdg, *args,**kwargs ):
if wdg == self.view:
self.item.addClass( "is-active" )
else:
self.item.removeClass( "is-active" )
def navigationAction( self,e=None,wdg=None):
'''
Handle Click on Navigation Button
'''
if self.view=="notfound" and self.state.getState("hasSubItems"):
self.subItem.toggleClass( "is-active" )
self.itemArrow.toggleClass( "is-active" )
else:
#if we have a linked view, update the view State
if self.view:
conf["views_state"].updateState("activeView", self.view)
#if this element is part of a Navigation, update active State
if self.nav:
self.nav.state.updateState("activeNavigation",self)
def RemoveAction( self,e=None ):
'''
remove this Nav Element
'''
#get previous Navigation Point
previousItem = self.nav.getPreviousNavigationPoint(self.view)
#remove associated View and switch to previous View
removeView(self.view, targetView=previousItem.view)
#remove navpoint
del self.nav.navigationPoints[self.view]
self.parent().removeChild( self )
if self.nav:
self.nav.state.updateState( "activeNavigation", previousItem )
def ArrowAction( self,e, wdg=None ):
self.subItem.toggleClass("is-active")
self.itemArrow.toggleClass("is-active")
def onActiveNavigationChanged( self,e,wdg, *args, **kwargs ):
'''
Handle an update of the surrounding navigation's active state.
'''
if wdg == self:
self.item.addClass("is-active")
else:
self.item.removeClass("is-active")
def onHasSubItemsChanged( self,e,wdg, *args, **kwargs ):
'''
Show the arrow when a sub-item is present; hide it otherwise.
'''
if e:
self.itemArrow.show()
else:
self.itemArrow.hide()
def appendSubChild( self,element ):
self.state.updateState("hasSubItems",True)
self.subItem.appendChild(element)
@html5.tag
class NavigationSeperator(html5.Div):
def __init__(self, name=None):
super().__init__()
self.name = name
self[ "class" ] = [ "list-separator", "list-separator--accordion", "is-active" ]
if self.name:
self.buildSeperator()
def buildSeperator( self ):
# language=HTML
self.appendChild( '''
<flare-svg-icon value="icon-dashboard"></flare-svg-icon>
<span class="list-separator-content">%s</span>
<flare-svg-icon value="icon-redo"></flare-svg-icon>
''' % self.name )
def _setValue( self,value ):
self.name = value
self.buildSeperator()
class Navigationblock(html5.Div):
def __init__(self, name, nav):
super().__init__()
self.name = name
self.seperator = None
self.navigation = nav
self[ "class" ] = [ "vi-modulelist", "list" ]
def addSeperator( self ):
#language=HTML
self.appendChild('''
<navigationseperator [name]="seperator" @click="seperatorAction" value="{{name}}"></navigationseperator>
''',
name=self.name)
def seperatorAction( self,e, wdg=None ):
for p in self.navigation.navigationPoints.copy():
self.navigation.removeNavigationPoint(p)
self.navigation.removeAllChildren()
conf["mainWindow"].refresh()
#self.seperator.toggleClass("is-active")
class AppNavigation(html5.Nav):
def __init__(self):
super().__init__()
self.state = StateHandler()
self.state.updateState( "activeNavigation", None )
self.navigationPoints = {}
def getPreviousNavigationPoint(self, view ):
aNav = self.navigationPoints[view]
try:
# fall back to the neighbouring sibling within the same section
idx = aNav.parent()._children.index( aNav ) + 1
indexOfItem = max( idx, 0 )
previousItem = aNav.parent()._children[ indexOfItem ]
except (ValueError, IndexError):
# point is not in this section; fall back to the second-to-last entry of the dict
previousItem = self.navigationPoints[list(self.navigationPoints)[-2]]
return previousItem
def getNavigationPoint( self,view ):
aNav = self.navigationPoints[ view ]
return aNav
def addNavigationBlock( self, name ):
aBlock = Navigationblock(name,self)
aBlock.addSeperator()
self.appendChild(aBlock)
return aBlock
def addNavigationPoint( self,name,icon,view=None,parent=None,closeable=False, opened=False ):
aNav = NavigationElement(name,icon,view,self,closeable=closeable,opened=opened)
if not parent:
parent = self
if isinstance(parent,NavigationElement):
parent.appendSubChild(aNav)
else:
parent.appendChild(aNav)
self.navigationPoints.update({view:aNav})
if closeable:
aNav.navigationAction()
return aNav
def addNavigationPointAfter( self,name,icon,view=None,beforeElement=None,closeable=False, opened=False ):
aNav = NavigationElement( name, icon, view, self,closeable=closeable,opened=opened )
if beforeElement:
beforeElement.parent().insertAfter( aNav, beforeElement )
else:
self.appendChild(aNav) #append at the end
self.navigationPoints.update({view:aNav})
if closeable:
aNav.navigationAction()
return aNav
def removeNavigationPoint( self,view ):
try:
aNav = self.navigationPoints[ view ]
aNav.RemoveAction()
del self.navigationPoints[view]
return True
except (KeyError, IndexError):
return False
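# --- Illustrative usage sketch (not part of the original module) ---
# How these classes compose; the view objects and icon names below are
# assumptions, not values taken from this file:
#
#   nav = AppNavigation()
#   block = nav.addNavigationBlock("Modules")
#   users = nav.addNavigationPoint("Users", "icon-users", view=userView, parent=block)
#   nav.addNavigationPoint("Groups", "icon-users", view=groupView, parent=users)  # rendered as a sub-item
#   nav.removeNavigationPoint(userView)  # closes the point and falls back to a neighbour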
| null |
801 | # Copyright (c) 2020 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Native Python MuHash3072 implementation."""
import hashlib
import unittest
from .util import modinv
def rot32(v, bits):
"""Rotate the 32-bit value v left by bits bits."""
bits %= 32 # Make sure the term below does not throw an exception
return ((v << bits) & 0xffffffff) | (v >> (32 - bits))
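# Illustrative examples (sketch): rot32(0x00000001, 1) == 0x00000002, and
# rot32(0x80000000, 1) == 0x00000001 -- the high bit wraps around to bit 0.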
def chacha20_doubleround(s):
"""Apply a ChaCha20 double round to 16-element state array s.
See https://cr.yp.to/chacha/chacha-20080128.pdf and https://tools.ietf.org/html/rfc8439
"""
QUARTER_ROUNDS = [(0, 4, 8, 12),
(1, 5, 9, 13),
(2, 6, 10, 14),
(3, 7, 11, 15),
(0, 5, 10, 15),
(1, 6, 11, 12),
(2, 7, 8, 13),
(3, 4, 9, 14)]
for a, b, c, d in QUARTER_ROUNDS:
s[a] = (s[a] + s[b]) & 0xffffffff
s[d] = rot32(s[d] ^ s[a], 16)
s[c] = (s[c] + s[d]) & 0xffffffff
s[b] = rot32(s[b] ^ s[c], 12)
s[a] = (s[a] + s[b]) & 0xffffffff
s[d] = rot32(s[d] ^ s[a], 8)
s[c] = (s[c] + s[d]) & 0xffffffff
s[b] = rot32(s[b] ^ s[c], 7)
def chacha20_32_to_384(key32):
"""Specialized ChaCha20 implementation with 32-byte key, 0 IV, 384-byte output."""
# See RFC 8439 section 2.3 for chacha20 parameters
CONSTANTS = [0x61707865, 0x3320646e, 0x79622d32, 0x6b206574]
key_bytes = [0]*8
for i in range(8):
key_bytes[i] = int.from_bytes(key32[(4 * i):(4 * (i+1))], 'little')
INITIALIZATION_VECTOR = [0] * 4
init = CONSTANTS + key_bytes + INITIALIZATION_VECTOR
out = bytearray()
for counter in range(6):
init[12] = counter
s = init.copy()
for _ in range(10):
chacha20_doubleround(s)
for i in range(16):
out.extend(((s[i] + init[i]) & 0xffffffff).to_bytes(4, 'little'))
return bytes(out)
def data_to_num3072(data):
"""Hash a 32-byte array data to a 3072-bit number using 6 Chacha20 operations."""
bytes384 = chacha20_32_to_384(data)
return int.from_bytes(bytes384, 'little')
class MuHash3072:
"""Class representing the MuHash3072 computation of a set.
See https://cseweb.ucsd.edu/~mihir/papers/inchash.pdf and https://lists.linuxfoundation.org/pipermail/bitcoin-dev/2017-May/014337.html
"""
MODULUS = 2**3072 - 1103717
def __init__(self):
"""Initialize for an empty set."""
self.numerator = 1
self.denominator = 1
def METHOD_NAME(self, data):
"""Insert a byte array data in the set."""
data_hash = hashlib.sha256(data).digest()
self.numerator = (self.numerator * data_to_num3072(data_hash)) % self.MODULUS
def remove(self, data):
"""Remove a byte array from the set."""
data_hash = hashlib.sha256(data).digest()
self.denominator = (self.denominator * data_to_num3072(data_hash)) % self.MODULUS
def digest(self):
"""Extract the final hash. Does not modify this object."""
val = (self.numerator * modinv(self.denominator, self.MODULUS)) % self.MODULUS
bytes384 = val.to_bytes(384, 'little')
return hashlib.sha256(bytes384).digest()
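# --- Illustrative sketch (not part of the original module) ---
# MuHash is a homomorphic set hash: the digest is independent of insertion
# order, and remove() cancels an earlier insert. METHOD_NAME is this
# dataset's mask for the insertion method.
def _muhash_commutativity_demo():
    m1, m2, m3 = MuHash3072(), MuHash3072(), MuHash3072()
    m1.METHOD_NAME(b'\x00' * 32)
    m1.METHOD_NAME(b'\x01' * 32)
    m2.METHOD_NAME(b'\x01' * 32)
    m2.METHOD_NAME(b'\x00' * 32)
    assert m1.digest() == m2.digest()  # insertion order does not matter
    m1.remove(b'\x01' * 32)
    m3.METHOD_NAME(b'\x00' * 32)
    assert m1.digest() == m3.digest()  # remove() cancels an insert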
class TestFrameworkMuhash(unittest.TestCase):
def test_muhash(self):
muhash = MuHash3072()
muhash.METHOD_NAME(b'\x00' * 32)
muhash.METHOD_NAME((b'\x01' + b'\x00' * 31))
muhash.remove((b'\x02' + b'\x00' * 31))
finalized = muhash.digest()
# This mirrors the result in the C++ MuHash3072 unit test
self.assertEqual(finalized[::-1].hex(), "10d312b100cbd32ada024a6646e40d3482fcff103668d2625f10002a607d5863")
def test_chacha20(self):
def chacha_check(key, result):
self.assertEqual(chacha20_32_to_384(key)[:64].hex(), result)
# Test vectors from https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04#section-7
# Since the nonce is hardcoded to 0 in our function we only use those vectors.
chacha_check([0]*32, "76b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc8b770dc7da41597c5157488d7724e03fb8d84a376a43b8f41518a11cc387b669b2ee6586")
chacha_check([0]*31 + [1], "4540f05a9f1fb296d7736e7b208e3c96eb4fe1834688d2604f450952ed432d41bbe2a0b6ea7566d2a5d1e7e20d42af2c53d792b1c43fea817e9ad275ae546963") | null |
802 | import pytest
from pharmpy.modeling import (
add_covariance_step,
add_estimation_step,
append_estimation_step_options,
remove_covariance_step,
remove_estimation_step,
set_estimation_step,
set_evaluation_step,
)
@pytest.mark.parametrize(
'method,kwargs,code_ref',
[
(
'fo',
{'interaction': False},
'$ESTIMATION METHOD=ZERO MAXEVAL=9990 PRINT=2 POSTHOC',
),
(
'fo',
{'interaction': True},
'$ESTIMATION METHOD=ZERO INTER MAXEVAL=9990 PRINT=2 POSTHOC',
),
(
'fo',
{'tool_options': {'saddle_reset': 1}},
'$ESTIMATION METHOD=ZERO INTER MAXEVAL=9990 PRINT=2 SADDLE_RESET=1',
),
(
'bayes',
{'interaction': True},
'$ESTIMATION METHOD=BAYES INTER MAXEVAL=9990 PRINT=2 POSTHOC',
),
(
'fo',
{'interaction': False, 'evaluation': True, 'maximum_evaluations': None},
'$ESTIMATION METHOD=ZERO MAXEVAL=0 PRINT=2 POSTHOC',
),
],
)
def test_set_estimation_step(testdata, load_model_for_test, method, kwargs, code_ref):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
model = set_estimation_step(model, method, **kwargs)
assert model.model_code.split('\n')[-2] == code_ref
def test_set_estimation_step_est_middle(testdata, load_model_for_test):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
model = set_estimation_step(model, 'FOCE', interaction=True, cov='SANDWICH', idx=0)
assert (
'$ESTIMATION METHOD=COND INTER MAXEVAL=9990 PRINT=2 POSTHOC\n$COVARIANCE'
in model.model_code
)
def test_add_estimation_step(testdata, load_model_for_test):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
assert len(model.estimation_steps) == 1
model = add_estimation_step(model, 'fo')
assert len(model.estimation_steps) == 2
assert model.model_code.split('\n')[-2] == '$ESTIMATION METHOD=ZERO'
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
assert len(model.estimation_steps) == 1
model = add_estimation_step(model, 'fo', evaluation=True)
assert len(model.estimation_steps) == 2
assert model.model_code.split('\n')[-2] == '$ESTIMATION METHOD=ZERO MAXEVAL=0'
def test_add_estimation_step_non_int(testdata, load_model_for_test):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
model = add_estimation_step(model, 'fo', idx=1.0)
with pytest.raises(TypeError, match='Index must be integer'):
add_estimation_step(model, 'fo', idx=1.5)
def test_remove_estimation_step(testdata, load_model_for_test):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
assert len(model.estimation_steps) == 1
model = remove_estimation_step(model, 0)
assert not model.estimation_steps
assert model.model_code.split('\n')[-2] == '$SIGMA 1'
def test_add_covariance_step(testdata, load_model_for_test):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
assert len(model.estimation_steps) == 1
model = add_covariance_step(model, 'SANDWICH')
assert len(model.estimation_steps) == 1
assert model.model_code.split('\n')[-2] == '$COVARIANCE'
model = remove_covariance_step(model)
model = add_covariance_step(model, 'CPG')
assert len(model.estimation_steps) == 1
assert model.model_code.split('\n')[-2] == '$COVARIANCE MATRIX=S'
def test_remove_covariance_step(testdata, load_model_for_test):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
model = add_covariance_step(model, 'SANDWICH')
assert model.model_code.split('\n')[-2] == '$COVARIANCE'
model = remove_covariance_step(model)
assert (
model.model_code.split('\n')[-2]
== '$ESTIMATION METHOD=COND INTER MAXEVAL=9990 PRINT=2 POSTHOC'
)
def METHOD_NAME(testdata, load_model_for_test):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
assert len(model.estimation_steps) == 1
model = append_estimation_step_options(model, {'SADDLE_RESET': 1}, 0)
assert (
model.model_code.split('\n')[-2]
== '$ESTIMATION METHOD=COND INTER MAXEVAL=9990 PRINT=2 POSTHOC SADDLE_RESET=1'
)
def test_set_evaluation_step(testdata, load_model_for_test):
model = load_model_for_test(testdata / 'nonmem' / 'minimal.mod')
model = set_evaluation_step(model)
assert (
model.model_code.split('\n')[-2]
== '$ESTIMATION METHOD=COND INTER MAXEVAL=0 PRINT=2 POSTHOC'
) | null |
803 | from typing import List, Union
import meerkat as mk
def make_test_df(
by: Union[str, List[str]],
ascending: Union[bool, List[bool]] = True,
):
"""Helper function, returns test df."""
df = mk.DataFrame(
{
"tensor": mk.TorchTensorColumn([3, 1, 2]),
"pandas": mk.ScalarColumn([9, 8, 7]),
"numpy": mk.TorchTensorColumn([5, 4, 6]),
}
)
test = df.sort(by=by, ascending=ascending)
return test
def make_tiebreaker_test_df(
by: Union[str, List[str]],
ascending: Union[bool, List[bool]] = True,
):
df = mk.DataFrame(
{
"tensor": mk.TorchTensorColumn([3, 2, 1]),
"pandas": mk.ScalarColumn([9, 7, 9]),
"numpy": mk.TorchTensorColumn([4, 4, 6]),
}
)
test = df.sort(by=by, ascending=ascending)
return test
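# --- Illustrative sketch (not part of the original test module) ---
# Assumption: mk.DataFrame.sort mirrors pandas' sort_values semantics, i.e.
# with by=["a", "b"] rows are ordered by "a" first and ties are broken by
# "b". The tiebreaker tests below exercise exactly this behaviour.
def _tiebreak_demo():
    df = mk.DataFrame({"a": mk.ScalarColumn([1, 1, 0]), "b": mk.ScalarColumn([2, 1, 3])})
    out = df.sort(by=["a", "b"], ascending=True)
    assert (out["b"] == mk.ScalarColumn([3, 1, 2])).all()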
# flake8: noqa
######## SINGLE COLUMN TESTS ########
def test_sort_by_ascending_tensor_column():
"""Testing all columns after sorting by an ascending tensor column."""
test = make_test_df(by=["tensor"])
assert (
(test["tensor"] == mk.TorchTensorColumn([1, 2, 3])).all()
and (test["pandas"] == mk.ScalarColumn([8, 7, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 6, 5])).all()
)
def test_sort_by_ascending_pandas_on_pandas_column():
"""Testing all columns after sorting by an ascending pandas column."""
test = make_test_df(by=["pandas"])
assert (
(test["tensor"] == mk.TorchTensorColumn([2, 1, 3])).all()
and (test["pandas"] == mk.ScalarColumn([7, 8, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([6, 4, 5])).all()
)
def test_sort_single_numpy_column_ascending():
"""Testing all columns after sorting by an ascending numpy column."""
test = make_test_df(by=["numpy"])
assert (
(test["tensor"] == mk.TorchTensorColumn([1, 3, 2])).all()
and (test["pandas"] == mk.ScalarColumn([8, 9, 7])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 5, 6])).all()
)
# flake8: noqa
######## SINGLE COLUMN TESTS DESCENDING ########
def test_sort_single_tensor_column_descending():
"""Testing all columns after sorting by a descending tensor column."""
test = make_test_df(by=["tensor"], ascending=False)
assert (
(test["tensor"] == mk.TorchTensorColumn([3, 2, 1])).all()
and (test["pandas"] == mk.ScalarColumn([9, 7, 8])).all()
and (test["numpy"] == mk.TorchTensorColumn([5, 6, 4])).all()
)
def test_sort_single_pandas_column_descending():
"""Testing all columns after sorting by a descending pandas column."""
test = make_test_df(by=["pandas"], ascending=False)
assert (
(test["tensor"] == mk.TorchTensorColumn([3, 1, 2])).all()
and (test["pandas"] == mk.ScalarColumn([9, 8, 7])).all()
and (test["numpy"] == mk.TorchTensorColumn([5, 4, 6])).all()
)
def test_sort_single_numpy_column_descending():
"""Testing all columns after sorting by a descending numpy column."""
test = make_test_df(by=["numpy"], ascending=False)
assert (
(test["tensor"] == mk.TorchTensorColumn([2, 3, 1])).all()
and (test["pandas"] == mk.ScalarColumn([7, 9, 8])).all()
and (test["numpy"] == mk.TorchTensorColumn([6, 5, 4])).all()
)
######## MULTIPLE COLUMN TESTS ########
def test_sort_numpy_and_tensor_ascending():
"""# Testing all columns after sorting with multiple ascending columns
(numpy and tensor)"""
test = make_tiebreaker_test_df(by=["numpy", "tensor"], ascending=True)
assert (
(test["tensor"] == mk.TorchTensorColumn([2, 3, 1])).all()
and (test["pandas"] == mk.ScalarColumn([7, 9, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 4, 6])).all()
)
def test_sort_numpy_and_pandas_ascending():
"""Testing all columns after sorting with multiple ascending columns (numpy
and tensor)"""
test = make_tiebreaker_test_df(by=["numpy", "pandas"], ascending=True)
assert (
(test["tensor"] == mk.TorchTensorColumn([2, 3, 1])).all()
and (test["pandas"] == mk.ScalarColumn([7, 9, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 4, 6])).all()
)
def test_sort_numpy_and_pandas_ascending_variable():
"""Testing all columns after sorting with multiple ascending columns (numpy
and tensor)"""
test = make_tiebreaker_test_df(by=["numpy", "pandas"], ascending=[True, False])
assert (
(test["tensor"] == mk.TorchTensorColumn([3, 2, 1])).all()
and (test["pandas"] == mk.ScalarColumn([9, 7, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 4, 6])).all()
)
def test_sort_numpy_and_pandas_and_tensor_ascending():
"""Testing all columns after sorting with multiple ascending columns (numpy
and pandas and tensor)"""
df = mk.DataFrame(
{
"tensor": mk.TorchTensorColumn([3, 2, 1]),
"pandas": mk.ScalarColumn([9, 7, 7]),
"numpy": mk.TorchTensorColumn([6, 4, 4]),
}
)
test = df.sort(by=["numpy", "pandas", "tensor"], ascending=True)
assert (
(test["tensor"] == mk.TorchTensorColumn([1, 2, 3])).all()
and (test["pandas"] == mk.ScalarColumn([7, 7, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 4, 6])).all()
)
def METHOD_NAME():
"""Testing all columns after sorting with multiple ascending columns
(tensor and pandas)."""
df = mk.DataFrame(
{
"tensor": mk.TorchTensorColumn([3, 2, 2]),
"pandas": mk.ScalarColumn([9, 8, 7]),
"numpy": mk.TorchTensorColumn([6, 4, 4]),
}
)
test = df.sort(by=["tensor", "pandas"], ascending=False)
assert (
(test["tensor"] == mk.TorchTensorColumn([3, 2, 2])).all()
and (test["pandas"] == mk.ScalarColumn([9, 8, 7])).all()
and (test["numpy"] == mk.TorchTensorColumn([6, 4, 4])).all()
)
def test_sort_with_store():
df = mk.DataFrame({"tensor": mk.TorchTensorColumn([3, 2, 4])})
test = df.sort(by=mk.Store("tensor"), ascending=True)
assert (test["tensor"] == mk.TorchTensorColumn([2, 3, 4])).all() | null |
804 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class ReplaceSystemDiskRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'ReplaceSystemDisk','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ImageId(self): # String
return self.get_query_params().get('ImageId')
def set_ImageId(self, ImageId): # String
self.add_query_param('ImageId', ImageId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_EncryptAlgorithm(self): # String
return self.get_query_params().get('EncryptAlgorithm')
def set_EncryptAlgorithm(self, EncryptAlgorithm): # String
self.add_query_param('EncryptAlgorithm', EncryptAlgorithm)
def get_SecurityEnhancementStrategy(self): # String
return self.get_query_params().get('SecurityEnhancementStrategy')
def set_SecurityEnhancementStrategy(self, SecurityEnhancementStrategy): # String
self.add_query_param('SecurityEnhancementStrategy', SecurityEnhancementStrategy)
def get_KeyPairName(self): # String
return self.get_query_params().get('KeyPairName')
def set_KeyPairName(self, KeyPairName): # String
self.add_query_param('KeyPairName', KeyPairName)
def get_Platform(self): # String
return self.get_query_params().get('Platform')
def set_Platform(self, Platform): # String
self.add_query_param('Platform', Platform)
def get_Password(self): # String
return self.get_query_params().get('Password')
def set_Password(self, Password): # String
self.add_query_param('Password', Password)
def get_PasswordInherit(self): # Boolean
return self.get_query_params().get('PasswordInherit')
def set_PasswordInherit(self, PasswordInherit): # Boolean
self.add_query_param('PasswordInherit', PasswordInherit)
def get_DiskId(self): # String
return self.get_query_params().get('DiskId')
def set_DiskId(self, DiskId): # String
self.add_query_param('DiskId', DiskId)
def get_Arns(self): # RepeatList
return self.get_query_params().get('Arn')
def set_Arns(self, Arn): # RepeatList
for depth1 in range(len(Arn)):
if Arn[depth1].get('Rolearn') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.Rolearn', Arn[depth1].get('Rolearn'))
if Arn[depth1].get('RoleType') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.RoleType', Arn[depth1].get('RoleType'))
if Arn[depth1].get('AssumeRoleFor') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.AssumeRoleFor', Arn[depth1].get('AssumeRoleFor'))
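# Illustrative note (sketch; the ARN values are placeholders, not real ones):
# set_Arns flattens a list of role dicts into indexed query parameters, e.g.
#   request.set_Arns([{'Rolearn': 'acs:ram::<account-id>:role/example', 'RoleType': 'service'}])
# emits Arn.1.Rolearn=... and Arn.1.RoleType=... on the request.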
def get_Architecture(self): # String
return self.get_query_params().get('Architecture')
def set_Architecture(self, Architecture): # String
self.add_query_param('Architecture', Architecture)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_SystemDiskSize(self): # Integer
return self.get_query_params().get('SystemDisk.Size')
def METHOD_NAME(self, SystemDiskSize): # Integer
self.add_query_param('SystemDisk.Size', SystemDiskSize)
def get_Encrypted(self): # Boolean
return self.get_query_params().get('Encrypted')
def set_Encrypted(self, Encrypted): # Boolean
self.add_query_param('Encrypted', Encrypted)
def get_KMSKeyId(self): # String
return self.get_query_params().get('KMSKeyId')
def set_KMSKeyId(self, KMSKeyId): # String
self.add_query_param('KMSKeyId', KMSKeyId)
def get_UseAdditionalService(self): # Boolean
return self.get_query_params().get('UseAdditionalService')
def set_UseAdditionalService(self, UseAdditionalService): # Boolean
self.add_query_param('UseAdditionalService', UseAdditionalService) | null |
805 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdomain.endpoint import endpoint_data
class VerifyContactFieldRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Domain', '2018-01-29', 'VerifyContactField')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Country(self): # String
return self.get_query_params().get('Country')
def set_Country(self, Country): # String
self.add_query_param('Country', Country)
def get_City(self): # String
return self.get_query_params().get('City')
def set_City(self, City): # String
self.add_query_param('City', City)
def get_ZhCity(self): # String
return self.get_query_params().get('ZhCity')
def set_ZhCity(self, ZhCity): # String
self.add_query_param('ZhCity', ZhCity)
def get_TelExt(self): # String
return self.get_query_params().get('TelExt')
def set_TelExt(self, TelExt): # String
self.add_query_param('TelExt', TelExt)
def get_Province(self): # String
return self.get_query_params().get('Province')
def set_Province(self, Province): # String
self.add_query_param('Province', Province)
def get_ZhRegistrantName(self): # String
return self.get_query_params().get('ZhRegistrantName')
def set_ZhRegistrantName(self, ZhRegistrantName): # String
self.add_query_param('ZhRegistrantName', ZhRegistrantName)
def get_PostalCode(self): # String
return self.get_query_params().get('PostalCode')
def set_PostalCode(self, PostalCode): # String
self.add_query_param('PostalCode', PostalCode)
def get_Lang(self): # String
return self.get_query_params().get('Lang')
def set_Lang(self, Lang): # String
self.add_query_param('Lang', Lang)
def get_Email(self): # String
return self.get_query_params().get('Email')
def set_Email(self, Email): # String
self.add_query_param('Email', Email)
def get_ZhRegistrantOrganization(self): # String
return self.get_query_params().get('ZhRegistrantOrganization')
def set_ZhRegistrantOrganization(self, ZhRegistrantOrganization): # String
self.add_query_param('ZhRegistrantOrganization', ZhRegistrantOrganization)
def get_Address(self): # String
return self.get_query_params().get('Address')
def set_Address(self, Address): # String
self.add_query_param('Address', Address)
def get_TelArea(self): # String
return self.get_query_params().get('TelArea')
def set_TelArea(self, TelArea): # String
self.add_query_param('TelArea', TelArea)
def get_ZhAddress(self): # String
return self.get_query_params().get('ZhAddress')
def set_ZhAddress(self, ZhAddress): # String
self.add_query_param('ZhAddress', ZhAddress)
def get_RegistrantType(self): # String
return self.get_query_params().get('RegistrantType')
def set_RegistrantType(self, RegistrantType): # String
self.add_query_param('RegistrantType', RegistrantType)
def get_DomainName(self): # String
return self.get_query_params().get('DomainName')
def set_DomainName(self, DomainName): # String
self.add_query_param('DomainName', DomainName)
def METHOD_NAME(self): # String
return self.get_query_params().get('Telephone')
def set_Telephone(self, Telephone): # String
self.add_query_param('Telephone', Telephone)
def get_ZhProvince(self): # String
return self.get_query_params().get('ZhProvince')
def set_ZhProvince(self, ZhProvince): # String
self.add_query_param('ZhProvince', ZhProvince)
def get_RegistrantOrganization(self): # String
return self.get_query_params().get('RegistrantOrganization')
def set_RegistrantOrganization(self, RegistrantOrganization): # String
self.add_query_param('RegistrantOrganization', RegistrantOrganization)
def get_UserClientIp(self): # String
return self.get_query_params().get('UserClientIp')
def set_UserClientIp(self, UserClientIp): # String
self.add_query_param('UserClientIp', UserClientIp)
def get_RegistrantName(self): # String
return self.get_query_params().get('RegistrantName')
def set_RegistrantName(self, RegistrantName): # String
self.add_query_param('RegistrantName', RegistrantName) | null |
806 | from pyrokinetics.gk_code import GKOutputReaderCGYRO
from pyrokinetics.gk_code.gk_output import GKOutput
from pyrokinetics import template_dir, Pyro
from pathlib import Path
import numpy as np
import pytest
from .utils import array_similar
# TODO mock output tests, similar to GS2
@pytest.fixture(scope="module")
def cgyro_tmp_path(tmp_path_factory):
tmp_dir = tmp_path_factory.mktemp("test_gk_output_reader_cgyro")
return tmp_dir
@pytest.fixture
def reader():
return GKOutputReaderCGYRO()
@pytest.fixture
def cgyro_output_dir(cgyro_tmp_path):
mock_dir = cgyro_tmp_path / "mock_dir"
mock_dir.mkdir()
required_files = GKOutputReaderCGYRO._required_files(mock_dir)
for required_file in required_files.values():
with open(required_file.path, "w") as _:
pass
return mock_dir
@pytest.fixture
def cgyro_output_dir_missing_file(cgyro_tmp_path):
mock_dir = cgyro_tmp_path / "broken_mock_dir"
mock_dir.mkdir()
required_files = GKOutputReaderCGYRO._required_files(mock_dir)
skip = True
for required_file in required_files.values():
if skip:
skip = False
continue
with open(required_file.path, "w") as _:
pass
return mock_dir
@pytest.fixture
def not_cgyro_dir(cgyro_tmp_path):
filename = cgyro_tmp_path / "hello_world.txt"
with open(filename, "w") as file:
file.write("hello world!")
return filename
def METHOD_NAME(reader, cgyro_output_dir):
# Expect exception to be raised if this fails
reader.verify_file_type(cgyro_output_dir)
def test_verify_cgyro_missing_file(reader, cgyro_output_dir_missing_file):
with pytest.raises(Exception):
reader.verify_file_type(cgyro_output_dir_missing_file)
def test_verify_not_cgyro_dir(reader, not_cgyro_dir):
with pytest.raises(Exception):
reader.verify_file_type(not_cgyro_dir)
def test_infer_path_from_input_file_cgyro():
input_path = Path("dir/to/input.cgyro")
output_path = GKOutputReaderCGYRO.infer_path_from_input_file(input_path)
assert output_path == Path("dir/to/")
# Golden answer tests
# Compares against results obtained using GKCode methods from commit 7d551eaa
# Update: Commit d3da468c accounts for new gkoutput structure
# This data was gathered from templates/outputs/CGYRO_linear
reference_data_commit_hash = "d3da468c"
@pytest.fixture(scope="class")
def golden_answer_reference_data(request):
this_dir = Path(__file__).parent
cdf_path = (
this_dir
/ "golden_answers"
/ f"cgyro_linear_output_{reference_data_commit_hash}.netcdf4"
)
request.cls.reference_data = GKOutput.from_netcdf(cdf_path)
@pytest.fixture(scope="class")
def golden_answer_data(request):
path = template_dir / "outputs" / "CGYRO_linear"
pyro = Pyro(gk_file=path / "input.cgyro", name="test_gk_output_cgyro")
norm = pyro.norms
request.cls.data = GKOutputReaderCGYRO().read_from_file(path, norm=norm)
@pytest.mark.usefixtures("golden_answer_reference_data", "golden_answer_data")
class TestCGYROGoldenAnswers:
def test_coords(self):
"""
Ensure that all reference coords are present in data
"""
for c in self.reference_data.coords:
dtype = self.reference_data[c].dtype
if dtype == "float64" or dtype == "complex128":
assert array_similar(self.reference_data[c], self.data[c])
else:
assert np.array_equal(self.reference_data[c], self.data[c])
@pytest.mark.parametrize(
"var",
[
"phi",
"particle",
"momentum",
"heat",
"eigenvalues",
"eigenfunctions",
"growth_rate",
"mode_frequency",
],
)
def test_data_vars(self, var):
assert array_similar(self.reference_data[var], self.data[var])
@pytest.mark.parametrize(
"attr",
[
"linear",
"gk_code",
"input_file",
"attribute_units",
"title",
"growth_rate_tolerance",
],
)
def test_data_attrs(self, attr):
if isinstance(getattr(self.reference_data, attr), float):
assert np.isclose(
getattr(self.reference_data, attr), getattr(self.data, attr)
)
else:
assert getattr(self.reference_data, attr) == getattr(self.data, attr) | null |
807 | from json import dumps
from django.core.handlers.wsgi import WSGIHandler
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.test.signals import template_rendered
from django.core import signals
from django.test.client import store_rendered_templates
from functools import partial
try:
from django.db import close_old_connections
except ImportError:
from django.db import close_connection
close_old_connections = None
from webtest.utils import NoDefault
from webtest_plus import TestApp
class JSONAPIWrapper(object):
"""
Creates wrapper with stated content_type.
"""
def make_wrapper(self, url, method, content_type, params=NoDefault, **kw):
"""
Helper method for generating wrapper method.
"""
if params is not NoDefault:
params = dumps(params, cls=self.JSONEncoder)
kw.update(
params=params,
content_type=content_type,
upload_files=None,
)
wrapper = self._gen_request(method, url, **kw)
subst = dict(lmethod=method.lower(), method=method)
wrapper.__name__ = str('%(lmethod)s_json_api' % subst)
return wrapper
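# --- Illustrative usage (sketch; the endpoint and payload are assumptions) ---
#   app = JSONAPITestApp()
#   res = app.post_json_api('/v2/nodes/', params={'data': {'type': 'nodes'}})
# make_wrapper serializes params with the configured JSONEncoder and sends
# Content-Type 'application/vnd.api+json'; bulk=True switches the methods
# defined below to 'application/vnd.api+json; ext=bulk'.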
class JSONAPITestApp(TestApp, JSONAPIWrapper):
"""
Extends TestApp to add json_api_methods(post, put, patch, and delete)
which put content_type 'application/vnd.api+json' in header. Adheres to
JSON API spec.
"""
def __init__(self, *args, **kwargs):
super(JSONAPITestApp, self).__init__(self.get_wsgi_handler(), *args, **kwargs)
self.auth = None
self.auth_type = 'basic'
def get_wsgi_handler(self):
return StaticFilesHandler(WSGIHandler())
# From django-webtest (MIT Licensed, see NOTICE for license details)
def do_request(self, req, status, expect_errors):
# Django closes the database connection after every request;
# this breaks the use of transactions in your tests.
if close_old_connections is not None: # Django 1.6+
signals.request_started.disconnect(close_old_connections)
signals.request_finished.disconnect(close_old_connections)
else: # Django < 1.6
signals.request_finished.disconnect(close_connection)
try:
req.environ.setdefault('REMOTE_ADDR', '127.0.0.1')
# is this a workaround for
# https://code.djangoproject.com/ticket/11111 ?
req.environ['REMOTE_ADDR'] = str(req.environ['REMOTE_ADDR'])
req.environ['PATH_INFO'] = str(req.environ['PATH_INFO'])
auth = req.environ.get('HTTP_AUTHORIZATION')
if auth is None:
req.environ['HTTP_AUTHORIZATION'] = 'None'
elif isinstance(auth, bytes):
req.environ['HTTP_AUTHORIZATION'] = auth.decode()
else:
req.environ['HTTP_AUTHORIZATION'] = str(auth)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
template_rendered.connect(on_template_render)
response = super(JSONAPITestApp, self).do_request(req, status,
expect_errors)
# Add any rendered template detail to the response.
# If there was only one template rendered (the most likely case),
# flatten the list to a single element.
def METHOD_NAME(detail):
if len(data[detail]) == 1:
return data[detail][0]
return data[detail]
response.context = None
response.template = None
response.templates = data.get('templates', None)
if data.get('context'):
response.context = METHOD_NAME('context')
if data.get('template'):
response.template = METHOD_NAME('template')
elif data.get('templates'):
response.template = METHOD_NAME('templates')
return response
finally:
if close_old_connections: # Django 1.6+
signals.request_started.connect(close_old_connections)
signals.request_finished.connect(close_old_connections)
else: # Django < 1.6
signals.request_finished.connect(close_connection)
def json_api_method(method):
def wrapper(self, url, params=NoDefault, bulk=False, **kw):
content_type = 'application/vnd.api+json'
if bulk:
content_type = 'application/vnd.api+json; ext=bulk'
return JSONAPIWrapper.make_wrapper(self, url, method, content_type, params, **kw)
return wrapper
post_json_api = json_api_method('POST')
put_json_api = json_api_method('PUT')
patch_json_api = json_api_method('PATCH')
delete_json_api = json_api_method('DELETE') | null |
808 | from typing import Optional
import arm.material.cycles as cycles
import arm.material.mat_state as mat_state
import arm.material.make_skin as make_skin
import arm.material.make_particle as make_particle
import arm.material.make_inst as make_inst
import arm.material.make_tess as make_tess
import arm.material.make_morph_target as make_morph_target
from arm.material.shader import Shader, ShaderContext
import arm.utils
if arm.is_reload(__name__):
cycles = arm.reload_module(cycles)
mat_state = arm.reload_module(mat_state)
make_skin = arm.reload_module(make_skin)
make_particle = arm.reload_module(make_particle)
make_inst = arm.reload_module(make_inst)
make_tess = arm.reload_module(make_tess)
make_morph_target = arm.reload_module(make_morph_target)
arm.material.shader = arm.reload_module(arm.material.shader)
from arm.material.shader import Shader, ShaderContext
arm.utils = arm.reload_module(arm.utils)
else:
arm.enable_reload(__name__)
def METHOD_NAME(vert):
billboard = mat_state.material.arm_billboard
particle = mat_state.material.arm_particle_flag
# Particles
if particle:
if arm.utils.get_rp().arm_particles == 'On':
make_particle.write(vert, particle_info=cycles.particle_info)
# Billboards
if billboard == 'spherical':
vert.add_uniform('mat4 WV', '_worldViewMatrix')
vert.add_uniform('mat4 P', '_projectionMatrix')
vert.write('gl_Position = P * (WV * vec4(0.0, 0.0, spos.z, 1.0) + vec4(spos.x, spos.y, 0.0, 0.0));')
else:
vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrix')
vert.write('gl_Position = WVP * spos;')
else:
# Billboards
if billboard == 'spherical':
vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrixSphere')
elif billboard == 'cylindrical':
vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrixCylinder')
else: # off
vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrix')
vert.write('gl_Position = WVP * spos;')
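# Illustrative note: in the spherical-billboard branch above, only the depth
# component of spos goes through the world-view transform while x/y are added
# back in view space, which keeps the quad facing the camera (a reading of
# the generated GLSL, not additional generated code).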
def write_norpos(con_mesh: ShaderContext, vert: Shader, declare=False, write_nor=True):
is_bone = con_mesh.is_elem('bone')
is_morph = con_mesh.is_elem('morph')
if is_morph:
make_morph_target.morph_pos(vert)
if is_bone:
make_skin.skin_pos(vert)
if write_nor:
prep = 'vec3 ' if declare else ''
if is_morph:
make_morph_target.morph_nor(vert, is_bone, prep)
if is_bone:
make_skin.skin_nor(vert, is_morph, prep)
if not is_morph and not is_bone:
vert.write_attrib(prep + 'wnormal = normalize(N * vec3(nor.xy, pos.w));')
if con_mesh.is_elem('ipos'):
make_inst.inst_pos(con_mesh, vert)
def write_tex_coords(con_mesh: ShaderContext, vert: Shader, frag: Shader, tese: Optional[Shader]):
rpdat = arm.utils.get_rp()
if con_mesh.is_elem('tex'):
vert.add_out('vec2 texCoord')
vert.add_uniform('float texUnpack', link='_texUnpack')
if mat_state.material.arm_tilesheet_flag:
if mat_state.material.arm_particle_flag and rpdat.arm_particles == 'On':
make_particle.write_tilesheet(vert)
else:
vert.add_uniform('vec2 tilesheetOffset', '_tilesheetOffset')
vert.write_attrib('texCoord = tex * texUnpack + tilesheetOffset;')
else:
vert.write_attrib('texCoord = tex * texUnpack;')
if tese is not None:
tese.write_pre = True
make_tess.interpolate(tese, 'texCoord', 2, declare_out=frag.contains('texCoord'))
tese.write_pre = False
if con_mesh.is_elem('tex1'):
vert.add_out('vec2 texCoord1')
vert.add_uniform('float texUnpack', link='_texUnpack')
vert.write_attrib('texCoord1 = tex1 * texUnpack;')
if tese is not None:
tese.write_pre = True
make_tess.interpolate(tese, 'texCoord1', 2, declare_out=frag.contains('texCoord1'))
tese.write_pre = False | null |
809 | """
HAR Formatter for REDbot.
"""
from html.parser import HTMLParser
import operator
import re
import textwrap
from typing import Any, List
import thor.http.error as httperr
from redbot.formatter import Formatter
from redbot.message import HttpResponse
from redbot.resource import HttpResource
from redbot.speak import Note, levels, categories
NL = "\n"
class BaseTextFormatter(Formatter):
"""
Base class for text formatters."""
media_type = "text/plain"
note_categories = [
categories.GENERAL,
categories.SECURITY,
categories.CONNECTION,
categories.CONNEG,
categories.CACHING,
categories.VALIDATION,
categories.RANGE,
]
link_order = [
("link", "Head Links"),
("script", "Script Links"),
("frame", "Frame Links"),
("iframe", "IFrame Links"),
("img", "Image Links"),
]
error_template = "Error: %s\n"
def __init__(self, *args: Any, **kw: Any) -> None:
Formatter.__init__(self, *args, **kw)
self.verbose = False
def start_output(self) -> None:
pass
def METHOD_NAME(self, sample: bytes) -> None:
pass
def status(self, status: str) -> None:
pass
def finish_output(self) -> None:
"Fill in the template with RED's results."
if self.resource.response.complete:
self.output(
NL.join(
[self.format_headers(r) for r in self.resource.nonfinal_responses]
)
+ NL
+ NL
)
self.output(self.format_headers(self.resource.response) + NL + NL)
self.output(self.format_recommendations(self.resource) + NL)
else:
if self.resource.response.http_error is None:
pass
elif isinstance(self.resource.response.http_error, httperr.HttpError):
self.output(
self.error_template % self.resource.response.http_error.desc
)
else:
raise AssertionError("Unknown incomplete response error.")
def error_output(self, message: str) -> None:
self.output(self.error_template % message)
@staticmethod
def format_headers(response: HttpResponse) -> str:
out = [
f"HTTP/{response.version} {response.status_code} {response.status_phrase}"
]
return NL.join(out + [f"{h[0]}:{h[1]}" for h in response.headers])
def format_recommendations(self, resource: HttpResource) -> str:
return "".join(
[
self.format_recommendation(resource, category)
for category in self.note_categories
]
)
def format_recommendation(
self, resource: HttpResource, category: categories
) -> str:
notes = [note for note in resource.notes if note.category == category]
if not notes:
return ""
out = []
if list(notes):
out.append(f"* {category.value}:")
for note in notes:
out.append(f" * {self.colorize(note.level, note.show_summary('en'))}")
if self.verbose:
out.append("")
out.extend(" " + line for line in self.format_text(note))
out.append("")
out.append(NL)
return NL.join(out)
@staticmethod
def format_text(note: Note) -> List[str]:
return textwrap.wrap(
strip_tags(re.sub(r"(?m)\s\s+", " ", note.show_text("en")))
)
def colorize(self, level: levels, instr: str) -> str:
if self.kw.get("tty_out", False):
color_end = "\033[0;39m"
if level == levels.GOOD:
color_start = "\033[1;32m"
elif level == levels.BAD:
color_start = "\033[1;31m"
elif level == levels.WARN:
color_start = "\033[1;33m"
else: # info and any other level
color_start = "\033[1;34m"
return color_start + instr + color_end
return instr
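# Illustrative note: with tty_out=True, colorize() wraps the string in ANSI
# escapes, e.g. levels.BAD yields "\033[1;31m" + instr + "\033[0;39m"
# (bold red, then reset to the default foreground colour).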
class TextFormatter(BaseTextFormatter):
"""
Format a REDbot object as text.
"""
name = "txt"
media_type = "text/plain"
def __init__(self, *args: Any, **kw: Any) -> None:
BaseTextFormatter.__init__(self, *args, **kw)
def finish_output(self) -> None:
BaseTextFormatter.finish_output(self)
class VerboseTextFormatter(TextFormatter):
name = "txt_verbose"
def __init__(self, *args: Any, **kw: Any) -> None:
TextFormatter.__init__(self, *args, **kw)
self.verbose = True
class TextListFormatter(BaseTextFormatter):
"""
Format multiple REDbot responses as a textual list.
"""
name = "text"
media_type = "text/plain"
can_multiple = True
def __init__(self, *args: Any, **kw: Any) -> None:
BaseTextFormatter.__init__(self, *args, **kw)
def finish_output(self) -> None:
"Fill in the template with RED's results."
BaseTextFormatter.finish_output(self)
sep = "=" * 78
for hdr_tag, heading in self.link_order:
subresources = [d[0] for d in self.resource.linked if d[1] == hdr_tag]
self.output(f"{sep}{NL}{heading} ({len(subresources)}){NL}{sep}{NL}")
if subresources:
subresources.sort(key=operator.attrgetter("request.uri"))
for subresource in subresources:
self.output(self.format_uri(subresource) + NL + NL)
self.output(self.format_headers(subresource.response) + NL + NL)
self.output(self.format_recommendations(subresource) + NL + NL)
def format_uri(self, resource: HttpResource) -> str:
return self.colorize(None, resource.request.uri)
class VerboseTextListFormatter(TextListFormatter):
name = "txt_verbose"
def __init__(self, *args: Any, **kw: Any) -> None:
TextListFormatter.__init__(self, *args, **kw)
self.verbose = True
class MLStripper(HTMLParser):
def __init__(self) -> None:
HTMLParser.__init__(self)
self.reset()
self.fed: List[str] = []
def handle_data(self, data: str) -> None:
self.fed.append(data)
def get_data(self) -> str:
return "".join(self.fed)
def error(self, message: str) -> None:
pass
def strip_tags(html: str) -> str:
stripper = MLStripper()
stripper.METHOD_NAME(html)
return stripper.get_data() | null |
810 | import datetime
import decimal
import functools
import typing
import uuid
from dateutil import parser, tz
class CustomIsoparser(parser.isoparser):
def __init__(self, sep: typing.Optional[str] = None):
"""
:param sep:
A single character that separates date and time portions. If
``None``, the parser will accept any single character.
For strict ISO-8601 adherence, pass ``'T'``.
"""
if sep is not None:
if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'):
raise ValueError('Separator must be a single, non-numeric ' +
'ASCII character')
used_sep = sep.encode('ascii')
else:
used_sep = None
self._sep = used_sep
@staticmethod
def __get_ascii_bytes(str_in: str) -> bytes:
# If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII
# ASCII is the same in UTF-8
try:
return str_in.encode('ascii')
except UnicodeEncodeError as e:
msg = 'ISO-8601 strings should contain only ASCII characters'
raise ValueError(msg) from e
def __parse_isodate(self, dt_str: str) -> typing.Tuple[typing.Tuple[int, int, int], int]:
dt_str_ascii = self.__get_ascii_bytes(dt_str)
values = self._parse_isodate(dt_str_ascii) # type: ignore
values = typing.cast(typing.Tuple[typing.List[int], int], values)
components = typing.cast( typing.Tuple[int, int, int], tuple(values[0]))
pos = values[1]
return components, pos
def __parse_isotime(self, dt_str: str) -> typing.Tuple[int, int, int, int, typing.Optional[typing.Union[tz.tzutc, tz.tzoffset]]]:
dt_str_ascii = self.__get_ascii_bytes(dt_str)
values = self._parse_isotime(dt_str_ascii) # type: ignore
components: typing.Tuple[int, int, int, int, typing.Optional[typing.Union[tz.tzutc, tz.tzoffset]]] = tuple(values) # type: ignore
return components
def parse_isodatetime(self, dt_str: str) -> datetime.datetime:
date_components, pos = self.__parse_isodate(dt_str)
if len(dt_str) <= pos:
# len(components) <= 3
raise ValueError('Value is not a datetime')
if self._sep is None or dt_str[pos:pos + 1] == self._sep:
hour, minute, second, microsecond, tzinfo = self.__parse_isotime(dt_str[pos + 1:])
if hour == 24:
hour = 0
components = (*date_components, hour, minute, second, microsecond, tzinfo)
return datetime.datetime(*components) + datetime.timedelta(days=1)
else:
components = (*date_components, hour, minute, second, microsecond, tzinfo)
else:
raise ValueError('String contains unknown ISO components')
return datetime.datetime(*components)
def parse_isodate_str(self, datestr: str) -> datetime.date:
components, pos = self.__parse_isodate(datestr)
if len(datestr) > pos:
raise ValueError('String contains invalid time components')
if len(components) > 3:
raise ValueError('String contains invalid time components')
return datetime.date(*components)
DEFAULT_ISOPARSER = CustomIsoparser()
@functools.lru_cache()
def as_date(arg: str) -> datetime.date:
"""
type = "string"
format = "date"
"""
return DEFAULT_ISOPARSER.parse_isodate_str(arg)
@functools.lru_cache()
def as_datetime(arg: str) -> datetime.datetime:
"""
type = "string"
format = "date-time"
"""
return DEFAULT_ISOPARSER.parse_isodatetime(arg)
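# Illustrative examples (sketch; outputs assume dateutil's tz handling):
#   as_date('2020-01-02')               -> datetime.date(2020, 1, 2)
#   as_datetime('2020-01-02T03:04:05Z') -> datetime.datetime(2020, 1, 2, 3, 4, 5, tzinfo=tz.tzutc())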
@functools.lru_cache()
def METHOD_NAME(arg: str) -> decimal.Decimal:
"""
Applicable when storing decimals that are sent over the wire as strings
type = "string"
format = "number"
"""
return decimal.Decimal(arg)
@functools.lru_cache()
def as_uuid(arg: str) -> uuid.UUID:
"""
type = "string"
format = "uuid"
"""
return uuid.UUID(arg) | null |
811 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkslb.endpoint import endpoint_data
class SetLoadBalancerUDPListenerAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Slb', '2014-05-15', 'SetLoadBalancerUDPListenerAttribute','slb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_AclStatus(self): # String
return self.get_query_params().get('AclStatus')
def set_AclStatus(self, AclStatus): # String
self.add_query_param('AclStatus', AclStatus)
def get_AclType(self): # String
return self.get_query_params().get('AclType')
def set_AclType(self, AclType): # String
self.add_query_param('AclType', AclType)
def get_MasterSlaveServerGroup(self): # String
return self.get_query_params().get('MasterSlaveServerGroup')
def set_MasterSlaveServerGroup(self, MasterSlaveServerGroup): # String
self.add_query_param('MasterSlaveServerGroup', MasterSlaveServerGroup)
def get_VServerGroupId(self): # String
return self.get_query_params().get('VServerGroupId')
def set_VServerGroupId(self, VServerGroupId): # String
self.add_query_param('VServerGroupId', VServerGroupId)
def get_AclId(self): # String
return self.get_query_params().get('AclId')
def set_AclId(self, AclId): # String
self.add_query_param('AclId', AclId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_MasterSlaveServerGroupId(self): # String
return self.get_query_params().get('MasterSlaveServerGroupId')
def set_MasterSlaveServerGroupId(self, MasterSlaveServerGroupId): # String
self.add_query_param('MasterSlaveServerGroupId', MasterSlaveServerGroupId)
def get_healthCheckReq(self): # String
return self.get_query_params().get('healthCheckReq')
def set_healthCheckReq(self, healthCheckReq): # String
self.add_query_param('healthCheckReq', healthCheckReq)
def get_HealthCheckInterval(self): # Integer
return self.get_query_params().get('HealthCheckInterval')
def set_HealthCheckInterval(self, HealthCheckInterval): # Integer
self.add_query_param('HealthCheckInterval', HealthCheckInterval)
def get_healthCheckExp(self): # String
return self.get_query_params().get('healthCheckExp')
def set_healthCheckExp(self, healthCheckExp): # String
self.add_query_param('healthCheckExp', healthCheckExp)
def get_ProxyProtocolV2Enabled(self): # Boolean
return self.get_query_params().get('ProxyProtocolV2Enabled')
def set_ProxyProtocolV2Enabled(self, ProxyProtocolV2Enabled): # Boolean
self.add_query_param('ProxyProtocolV2Enabled', ProxyProtocolV2Enabled)
def get_HealthCheckSwitch(self): # String
return self.get_query_params().get('HealthCheckSwitch')
def set_HealthCheckSwitch(self, HealthCheckSwitch): # String
self.add_query_param('HealthCheckSwitch', HealthCheckSwitch)
def get_HealthCheckConnectTimeout(self): # Integer
return self.get_query_params().get('HealthCheckConnectTimeout')
def set_HealthCheckConnectTimeout(self, HealthCheckConnectTimeout): # Integer
self.add_query_param('HealthCheckConnectTimeout', HealthCheckConnectTimeout)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_UnhealthyThreshold(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def get_ListenerPort(self): # Integer
return self.get_query_params().get('ListenerPort')
def set_ListenerPort(self, ListenerPort): # Integer
self.add_query_param('ListenerPort', ListenerPort)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_Bandwidth(self): # Integer
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self, Bandwidth): # Integer
self.add_query_param('Bandwidth', Bandwidth)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def METHOD_NAME(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_HealthCheckConnectPort(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort)
def get_VServerGroup(self): # String
return self.get_query_params().get('VServerGroup')
def set_VServerGroup(self, VServerGroup): # String
self.add_query_param('VServerGroup', VServerGroup) | null |
812 | import time
from unittest import TestCase
from pcs.common import tools
class RunParallelTestCase(TestCase):
def test_run_all(self):
data_list = [([i], {}) for i in range(5)]
out_list = []
tools.run_parallel(out_list.append, data_list)
self.assertEqual(sorted(out_list), list(range(5)))
def test_parallelism(self):
timeout = 5
data_list = [[[i + 1], {}] for i in range(timeout)]
start_time = time.time()
# this should last at least `timeout` seconds, but less than the sum of
# all the individual sleep times
tools.run_parallel(time.sleep, data_list)
finish_time = time.time()
elapsed_time = finish_time - start_time
self.assertTrue(elapsed_time > timeout)
self.assertTrue(elapsed_time < sum(i + 1 for i in range(timeout)))
class VersionTest(TestCase):
# pylint: disable=invalid-name
def assert_asterisk(self, expected, major, minor=None, revision=None):
self.assertEqual(expected, (major, minor, revision))
def assert_eq_tuple(self, a, b):
self.assert_eq(tools.Version(*a), tools.Version(*b))
def assert_lt_tuple(self, a, b):
self.METHOD_NAME(tools.Version(*a), tools.Version(*b))
def assert_eq(self, a, b):
self.assertTrue(a == b)
self.assertFalse(a != b)
self.assertFalse(a < b)
self.assertTrue(a <= b)
self.assertFalse(a > b)
self.assertTrue(a >= b)
def METHOD_NAME(self, a, b):
self.assertFalse(a == b)
self.assertTrue(a != b)
self.assertTrue(a < b)
self.assertTrue(a <= b)
self.assertFalse(a > b)
self.assertFalse(a >= b)
def test_major(self):
ver = tools.Version(2)
self.assert_asterisk((2, None, None), *ver)
self.assertEqual(ver.major, 2)
self.assertEqual(ver[0], 2)
self.assertEqual(ver.minor, None)
self.assertEqual(ver[1], None)
self.assertEqual(ver.revision, None)
self.assertEqual(ver[2], None)
self.assertEqual(ver.as_full_tuple, (2, 0, 0))
self.assertEqual(str(ver), "2")
self.assertEqual(str(ver.normalize()), "2.0.0")
def test_major_minor(self):
ver = tools.Version(2, 3)
self.assert_asterisk((2, 3, None), *ver)
self.assertEqual(ver.major, 2)
self.assertEqual(ver[0], 2)
self.assertEqual(ver.minor, 3)
self.assertEqual(ver[1], 3)
self.assertEqual(ver.revision, None)
self.assertEqual(ver[2], None)
self.assertEqual(ver.as_full_tuple, (2, 3, 0))
self.assertEqual(str(ver), "2.3")
self.assertEqual(str(ver.normalize()), "2.3.0")
def test_major_minor_revision(self):
ver = tools.Version(2, 3, 4)
self.assert_asterisk((2, 3, 4), *ver)
self.assertEqual(ver.major, 2)
self.assertEqual(ver[0], 2)
self.assertEqual(ver.minor, 3)
self.assertEqual(ver[1], 3)
self.assertEqual(ver.revision, 4)
self.assertEqual(ver[2], 4)
self.assertEqual(ver.as_full_tuple, (2, 3, 4))
self.assertEqual(str(ver), "2.3.4")
self.assertEqual(str(ver.normalize()), "2.3.4")
def test_compare(self):
self.assert_eq_tuple((2,), (2,))
self.assert_lt_tuple((2,), (3,))
self.assert_eq_tuple((2, 0), (2, 0))
self.assert_lt_tuple((2, 0), (2, 5))
self.assert_lt_tuple((2, 0), (3, 5))
self.assert_eq_tuple((2, 0), (2,))
self.assert_lt_tuple((2, 0), (3,))
self.assert_lt_tuple((2, 5), (3,))
self.assert_lt_tuple((3,), (3, 5))
self.assert_eq_tuple((2, 0, 0), (2, 0, 0))
self.assert_lt_tuple((2, 0, 0), (2, 0, 1))
self.assert_lt_tuple((2, 0, 0), (2, 5, 0))
self.assert_lt_tuple((2, 0, 0), (2, 5, 1))
self.assert_lt_tuple((2, 0, 0), (3, 0, 0))
self.assert_lt_tuple((2, 0, 0), (3, 0, 1))
self.assert_lt_tuple((2, 0, 0), (3, 5, 0))
self.assert_lt_tuple((2, 0, 0), (3, 5, 1))
self.assert_eq_tuple((2, 0, 0), (2, 0))
self.assert_eq_tuple((2, 0, 0), (2,))
self.assert_lt_tuple((2, 0, 0), (2, 5))
self.assert_lt_tuple((2, 0, 0), (3,))
self.assert_lt_tuple((2, 5, 0), (3,))
self.assert_lt_tuple((2,), (2, 5, 0))
self.assert_eq_tuple((2, 5, 0), (2, 5))
self.assert_lt_tuple((2, 5, 0), (3, 5))
self.assert_lt_tuple((2, 0), (2, 5, 1))
self.assert_lt_tuple((2, 5), (2, 5, 1))
self.assert_lt_tuple((2, 5, 1), (3, 5))
self.assert_lt_tuple((2, 5, 1), (3,))
self.assert_lt_tuple((2,), (2, 5, 1))
self.assert_lt_tuple((2, 5, 1), (3,))
self.assert_lt_tuple((2,), (3, 5, 1))
self.assert_lt_tuple((3,), (3, 5, 1))
self.assert_lt_tuple((2, 0), (3, 5, 1))
self.assert_lt_tuple((2, 5), (3, 5, 1))
self.assert_lt_tuple((3, 5), (3, 5, 1))
class TimeoutToSecondsTest(TestCase):
def test_valid(self):
self.assertEqual(10, tools.timeout_to_seconds(10))
self.assertEqual(10, tools.timeout_to_seconds("10"))
self.assertEqual(10, tools.timeout_to_seconds("10s"))
self.assertEqual(10, tools.timeout_to_seconds("10sec"))
self.assertEqual(600, tools.timeout_to_seconds("10m"))
self.assertEqual(600, tools.timeout_to_seconds("10min"))
self.assertEqual(36000, tools.timeout_to_seconds("10h"))
self.assertEqual(36000, tools.timeout_to_seconds("10hr"))
def test_invalid(self):
self.assertEqual(None, tools.timeout_to_seconds(-10))
self.assertEqual(None, tools.timeout_to_seconds("1a1s"))
self.assertEqual(None, tools.timeout_to_seconds("10mm"))
self.assertEqual(None, tools.timeout_to_seconds("10mim"))
self.assertEqual(None, tools.timeout_to_seconds("aaa"))
self.assertEqual(None, tools.timeout_to_seconds("")) | null |
813 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import hashlib
from random import Random
from typing import Any, Callable, Dict, List, Optional, Type, Union
import numpy as np
import torch
from overrides import overrides
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.search_space import (
BayesOptSearchSpace,
EvolutionarySearchSpace,
)
from archai.discrete_search.search_spaces.config import utils
from archai.discrete_search.search_spaces.config.arch_config import (
ArchConfig,
build_arch_config,
)
from archai.discrete_search.search_spaces.config.arch_param_tree import ArchParamTree
class ConfigSearchSpace(EvolutionarySearchSpace, BayesOptSearchSpace):
def __init__(
self,
model_cls: Type[torch.nn.Module],
arch_param_tree: Union[ArchParamTree, Callable[..., ArchParamTree]],
seed: Optional[int] = None,
mutation_prob: float = 0.3,
track_unused_params: bool = True,
unused_param_value: float = -1.0,
hash_archid: bool = True,
model_kwargs: Optional[Dict[str, Any]] = None,
builder_kwargs: Optional[Dict[str, Any]] = None,
) -> None:
"""Config-based Discrete Search Space.
Args:
            model_cls (Type[torch.nn.Module]): Model class. This class expects the first argument
                of the `model_cls` constructor to be an `ArchConfig` object.
arch_param_tree (Union[ArchParamTree, Callable[..., ArchParamTree]]): `ArchParamTree` object
or a builder function that returns an `ArchParamTree` object.
seed (int, optional): Random seed used for sampling, mutations and crossovers. Defaults to None.
mutation_prob (float, optional): Probability of mutating a parameter. Defaults to 0.3.
track_unused_params (bool, optional): Whether to track unused parameters. Defaults to True.
            unused_param_value (float, optional): Value to use for unused parameters. Defaults to -1.0.
            hash_archid (bool, optional): Whether to hash architecture identifiers. Defaults to True.
model_kwargs: Additional arguments to pass to `model_cls` constructor.
builder_kwargs: Arguments to pass to `arch_param_tree` if a builder function is passed.
"""
self.model_cls = model_cls
self.arch_param_tree = arch_param_tree
self.mutation_prob = mutation_prob
self.track_unused_params = track_unused_params
self.unused_param_value = unused_param_value
self.model_kwargs = model_kwargs or {}
self.builder_kwargs = builder_kwargs or {}
self.hash_archid = hash_archid
if callable(self.arch_param_tree):
self.arch_param_tree = self.arch_param_tree(**self.builder_kwargs)
self.rng = Random(seed)
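        # Illustrative usage sketch (not from the original source; `MyModel`
        # and `my_tree_builder` are hypothetical stand-ins for a torch module
        # taking an ArchConfig as first argument and an ArchParamTree builder):
        #
        #   space = ConfigSearchSpace(MyModel, my_tree_builder, seed=42)
        #   model = space.random_sample()        # fresh ArchaiModel
        #   child = space.mutate(model)          # per-param mutation, p=0.3
        #   cross = space.crossover([model, child])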
def get_archid(self, arch_config: ArchConfig) -> str:
"""Return the architecture identifier for the given architecture configuration.
Args:
arch_config: Architecture configuration.
Returns:
Architecture identifier.
"""
archid = self.arch_param_tree.encode_config(arch_config, track_unused_params=self.track_unused_params)
archid = str(tuple(archid))
if self.hash_archid:
archid = hashlib.sha1(archid.encode("utf-8")).hexdigest()
return archid
@overrides
def save_arch(self, model: ArchaiModel, path: str) -> None:
model.metadata["config"].to_file(path)
@overrides
def METHOD_NAME(self, path: str) -> ArchaiModel:
config = ArchConfig.from_file(path)
model = self.model_cls(config, **self.model_kwargs)
return ArchaiModel(arch=model, archid=self.get_archid(config), metadata={"config": config})
@overrides
def save_model_weights(self, model: ArchaiModel, path: str) -> None:
torch.save(model.arch.get_state_dict(), path)
@overrides
def load_model_weights(self, model: ArchaiModel, path: str) -> None:
model.arch.load_state_dict(torch.load(path))
@overrides
def random_sample(self) -> ArchaiModel:
config = self.arch_param_tree.sample_config(self.rng)
model = self.model_cls(config, **self.model_kwargs)
return ArchaiModel(arch=model, archid=self.get_archid(config), metadata={"config": config})
@overrides
def mutate(self, model: ArchaiModel) -> ArchaiModel:
choices_dict = self.arch_param_tree.to_dict()
# Mutates parameter with probability `self.mutation_prob`
mutated_dict = utils.replace_ptree_pair_choices(
choices_dict,
model.metadata["config"].to_dict(),
lambda d_choice, current_choice: (
self.rng.choice(d_choice.choices) if self.rng.random() < self.mutation_prob else current_choice
),
)
mutated_config = build_arch_config(mutated_dict)
mutated_model = self.model_cls(mutated_config, **self.model_kwargs)
return ArchaiModel(
arch=mutated_model, archid=self.get_archid(mutated_config), metadata={"config": mutated_config}
)
@overrides
def crossover(self, model_list: List[ArchaiModel]) -> ArchaiModel:
# Selects two models from `model_list` to perform crossover
model_1, model_2 = self.rng.choices(model_list, k=2)
# Starting with arch param tree dict, randomly replaces DiscreteChoice objects
# with params from model_1 with probability 0.5
choices_dict = self.arch_param_tree.to_dict()
cross_dict = utils.replace_ptree_pair_choices(
choices_dict,
model_1.metadata["config"].to_dict(),
lambda d_choice, m1_value: (m1_value if self.rng.random() < 0.5 else d_choice),
)
# Replaces all remaining DiscreteChoice objects with params from model_2
cross_dict = utils.replace_ptree_pair_choices(
cross_dict, model_2.metadata["config"].to_dict(), lambda d_choice, m2_value: m2_value
)
cross_config = build_arch_config(cross_dict)
cross_model = self.model_cls(cross_config, **self.model_kwargs)
return ArchaiModel(arch=cross_model, archid=self.get_archid(cross_config), metadata={"config": cross_config})
@overrides
def encode(self, model: ArchaiModel) -> np.ndarray:
encoded_config = np.array(
self.arch_param_tree.encode_config(
model.metadata["config"],
track_unused_params=self.track_unused_params
)
)
return np.nan_to_num(encoded_config, nan=self.unused_param_value) | null |
814 | import unittest
import pytest
import torch
from pytest_mock import MockerFixture
from torch import distributed as dist
from lightly.loss import SwaVLoss
class TestSwaVLoss:
def test__sinkhorn_gather_distributed(self, mocker: MockerFixture) -> None:
mock_is_available = mocker.patch.object(dist, "is_available", return_value=True)
SwaVLoss(sinkhorn_gather_distributed=True)
mock_is_available.assert_called_once()
def METHOD_NAME(
self, mocker: MockerFixture
) -> None:
mock_is_available = mocker.patch.object(
dist, "is_available", return_value=False
)
with pytest.raises(ValueError):
SwaVLoss(sinkhorn_gather_distributed=True)
mock_is_available.assert_called_once()
class TestSwaVLossUnitTest(unittest.TestCase):
    # Old tests in unittest style, please add new tests to TestSwaVLoss using pytest.
def test_forward_pass(self):
n = 32
n_high_res = 2
high_res = [torch.eye(32, 32) for i in range(n_high_res)]
for n_low_res in range(6):
for sinkhorn_iterations in range(3):
criterion = SwaVLoss(sinkhorn_iterations=sinkhorn_iterations)
low_res = [torch.eye(n, n) for i in range(n_low_res)]
with self.subTest(
msg=f"n_low_res={n_low_res}, sinkhorn_iterations={sinkhorn_iterations}"
):
loss = criterion(high_res, low_res)
# loss should be almost zero for unit matrix
self.assertGreater(0.5, loss.cpu().numpy())
def test_forward_pass_queue(self):
n = 32
n_high_res = 2
high_res = [torch.eye(32, 32) for i in range(n_high_res)]
queue_length = 128
queue = [torch.eye(128, 32) for i in range(n_high_res)]
for n_low_res in range(6):
for sinkhorn_iterations in range(3):
criterion = SwaVLoss(sinkhorn_iterations=sinkhorn_iterations)
low_res = [torch.eye(n, n) for i in range(n_low_res)]
with self.subTest(
msg=f"n_low_res={n_low_res}, sinkhorn_iterations={sinkhorn_iterations}"
):
loss = criterion(high_res, low_res, queue)
# loss should be almost zero for unit matrix
self.assertGreater(0.5, loss.cpu().numpy())
def test_forward_pass_bsz_1(self):
n = 32
n_high_res = 2
high_res = [torch.eye(1, n) for i in range(n_high_res)]
for n_low_res in range(6):
for sinkhorn_iterations in range(3):
criterion = SwaVLoss(sinkhorn_iterations=sinkhorn_iterations)
low_res = [torch.eye(1, n) for i in range(n_low_res)]
with self.subTest(
msg=f"n_low_res={n_low_res}, sinkhorn_iterations={sinkhorn_iterations}"
):
loss = criterion(high_res, low_res)
def test_forward_pass_1d(self):
n = 32
n_high_res = 2
high_res = [torch.eye(n, 1) for i in range(n_high_res)]
for n_low_res in range(6):
for sinkhorn_iterations in range(3):
criterion = SwaVLoss(sinkhorn_iterations=sinkhorn_iterations)
low_res = [torch.eye(n, 1) for i in range(n_low_res)]
with self.subTest(
msg=f"n_low_res={n_low_res}, sinkhorn_iterations={sinkhorn_iterations}"
):
loss = criterion(high_res, low_res)
# loss should be almost zero for unit matrix
self.assertGreater(0.5, loss.cpu().numpy())
    @unittest.skipUnless(torch.cuda.is_available(), "CUDA not available")
def test_forward_pass_cuda(self):
n = 32
n_high_res = 2
high_res = [torch.eye(n, n).cuda() for i in range(n_high_res)]
for n_low_res in range(6):
for sinkhorn_iterations in range(3):
criterion = SwaVLoss(sinkhorn_iterations=sinkhorn_iterations)
low_res = [torch.eye(n, n).cuda() for i in range(n_low_res)]
with self.subTest(
msg=f"n_low_res={n_low_res}, sinkhorn_iterations={sinkhorn_iterations}"
):
loss = criterion(high_res, low_res)
# loss should be almost zero for unit matrix
self.assertGreater(0.5, loss.cpu().numpy()) | null |
815 | # Copyright (c) ZenML GmbH 2022. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Implements a custom model for the Kserve integration."""
from typing import Any, Dict
import click
import kserve
from zenml.logger import get_logger
from zenml.utils import source_utils
logger = get_logger(__name__)
DEFAULT_MODEL_NAME = "model"
DEFAULT_LOCAL_MODEL_DIR = "/mnt/models"
class ZenMLCustomModel(kserve.Model): # type: ignore[misc]
"""Custom model class for ZenML and Kserve.
    This class implements a custom model for the KServe integration and is
    used as the main entry point for custom code execution.
Attributes:
model_name: The name of the model.
model_uri: The URI of the model.
predict_func: The predict function of the model.
"""
def __init__(
self,
model_name: str,
model_uri: str,
predict_func: str,
):
"""Initializes a ZenMLCustomModel object.
Args:
model_name: The name of the model.
model_uri: The URI of the model.
predict_func: The predict function of the model.
"""
super().__init__(model_name)
self.name = model_name
self.model_uri = model_uri
self.predict_func = source_utils.load(predict_func)
self.model = None
self.ready = False
def load(self) -> bool:
"""Load the model.
This function loads the model into memory and sets the ready flag to True.
        The model is loaded via the materializer: the artifact information is
        saved to a YAML file next to the model artifacts at preparation time
        and loaded again by the materializer at prediction time.
Returns:
True if the model was loaded successfully, False otherwise.
"""
try:
from zenml.utils.artifact_utils import load_model_from_metadata
self.model = load_model_from_metadata(self.model_uri)
except Exception as e:
logger.error("Failed to load model: {}".format(e))
return False
self.ready = True
return self.ready
def predict(self, request: Dict[str, Any]) -> Dict[str, Any]:
"""Predict the given request.
The main predict function of the model. This function is called by the
KServe server when a request is received. Then inside this function,
the user-defined predict function is called.
Args:
request: The request to predict in a dictionary. e.g. {"instances": []}
Returns:
The prediction dictionary.
Raises:
RuntimeError: If function could not be called.
NotImplementedError: If the model is not ready.
TypeError: If the request is not a dictionary.
"""
if self.predict_func is not None:
try:
prediction = {
"predictions": self.predict_func(
self.model, request["instances"]
)
}
except RuntimeError as err:
raise RuntimeError("Failed to predict: {}".format(err))
if isinstance(prediction, dict):
return prediction
else:
raise TypeError(
f"Prediction is not a dictionary. Expecting a dictionary but got {type(prediction)}"
)
else:
raise NotImplementedError("Predict function is not implemented")
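        # Illustrative request/response shapes (assumed, mirroring the
        # `{"instances": []}` convention documented above; payload values
        # are hypothetical):
        #
        #   request  = {"instances": [[5.1, 3.5, 1.4, 0.2]]}
        #   response = {"predictions": [0]}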
@click.command()
@click.option(
"--model_uri",
default=DEFAULT_LOCAL_MODEL_DIR,
type=click.STRING,
help="The directory where the model is stored locally.",
)
@click.option(
"--model_name",
default=DEFAULT_MODEL_NAME,
required=True,
type=click.STRING,
help="The name of the model to deploy. This is important for the KServe server.",
)
@click.option(
"--predict_func",
required=True,
type=click.STRING,
help="The path to the custom predict function defined by the user.",
)
def METHOD_NAME(model_name: str, model_uri: str, predict_func: str) -> None:
"""Main function responsible for starting the KServe server.
    The custom deployment server works with the KServe server by implementing
    a custom model class, passing it to the KServe server, and then starting
    the server. Because custom model classes usually need parameters, these are
    passed from the entry point to the main function as arguments and then
    forwarded to the model class constructor.
The following is an example of the entry point:
```
entrypoint_command = [
"python",
"-m",
"zenml.integrations.kserve.custom_deployer.zenml_custom_model",
"--model_name",
config.service_config.model_name,
"--predict_func",
config.custom_deploy_parameters.predict_function,
]
```
Args:
model_name: The name of the model.
model_uri: The URI of the model.
predict_func: The path to the predict function defined by the user.
"""
model = ZenMLCustomModel(model_name, model_uri, predict_func)
model.load()
kserve.ModelServer().start([model])
if __name__ == "__main__":
METHOD_NAME() | null |
816 | # Copyright 2017-2022 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pipeline.hpc.logger import Logger
def _perform_command(action, msg, error_msg, skip_on_failure):
Logger.info(msg)
try:
action()
except RuntimeError as e:
Logger.warn(error_msg)
if not skip_on_failure:
raise RuntimeError(error_msg, e)
class GridEngineType:
SGE = "SGE"
SLURM = "SLURM"
def __init__(self):
pass
class AllocationRuleParsingError(RuntimeError):
pass
class AllocationRule:
ALLOWED_VALUES = ['$pe_slots', '$fill_up', '$round_robin']
def __init__(self, value):
if value in AllocationRule.ALLOWED_VALUES:
self.value = value
else:
            raise AllocationRuleParsingError('Wrong AllocationRule value, only %s are allowed!' % AllocationRule.ALLOWED_VALUES)
@staticmethod
def pe_slots():
return AllocationRule('$pe_slots')
@staticmethod
def fill_up():
return AllocationRule('$fill_up')
@staticmethod
def round_robin():
return AllocationRule('$round_robin')
@staticmethod
def fractional_rules():
return [AllocationRule.round_robin(), AllocationRule.fill_up()]
@staticmethod
def integral_rules():
return [AllocationRule.pe_slots()]
def __eq__(self, other):
if not isinstance(other, AllocationRule):
# don't attempt to compare against unrelated types
return False
return other.value == self.value
class GridEngineJobState:
RUNNING = 'running'
PENDING = 'pending'
SUSPENDED = 'suspended'
ERROR = 'errored'
DELETED = 'deleted'
COMPLETED = 'completed'
UNKNOWN = 'unknown'
_letter_codes_to_states = {
# Job statuses: [SGE] + [SLURM]
RUNNING: ['r', 't', 'Rr', 'Rt'] + ['RUNNING'],
        PENDING: ['qw', 'hqw', 'hRwq'] + ['PENDING'],
SUSPENDED: ['s', 'ts', 'S', 'tS', 'T', 'tT', 'Rs', 'Rts', 'RS', 'RtS', 'RT', 'RtT'] + ['SUSPENDED', 'STOPPED'],
        ERROR: ['Eqw', 'Ehqw', 'EhRqw'] + ['DEADLINE', 'FAILED'],
DELETED: ['dr', 'dt', 'dRr', 'dRt', 'ds', 'dS', 'dT', 'dRs', 'dRS', 'dRT'] + ['DELETED', 'CANCELLED'],
COMPLETED: [] + ['COMPLETED', 'COMPLETING']
}
@staticmethod
def from_letter_code(code):
for key in GridEngineJobState._letter_codes_to_states:
if code in GridEngineJobState._letter_codes_to_states[key]:
return key
return GridEngineJobState.UNKNOWN
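    # Examples of the mapping above (derived from _letter_codes_to_states):
    #
    #   GridEngineJobState.from_letter_code('r')        # -> 'running' (SGE)
    #   GridEngineJobState.from_letter_code('PENDING')  # -> 'pending' (SLURM)
    #   GridEngineJobState.from_letter_code('zzz')      # -> 'unknown'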
class GridEngineJob:
def __init__(self, id, root_id, name, user, state, datetime, hosts=None, cpu=0, gpu=0, mem=0, pe='local'):
self.id = id
self.root_id = root_id
self.name = name
self.user = user
self.state = state
self.datetime = datetime
self.hosts = hosts if hosts else []
self.cpu = cpu
self.gpu = gpu
self.mem = mem
self.pe = pe
def __repr__(self):
return str(self.__dict__)
class GridEngine:
def get_jobs(self):
pass
def disable_host(self, host):
"""
Disables host to prevent receiving new jobs from the queue.
This command does not abort currently running jobs.
        :param host: Host to be disabled.
"""
pass
def enable_host(self, host):
"""
Enables host to make it available to receive new jobs from the queue.
:param host: Host to be enabled.
"""
pass
def METHOD_NAME(self, pe):
"""
Returns allocation rule of the pe
:param pe: Parallel environment to return allocation rule.
"""
pass
def delete_host(self, host, skip_on_failure=False):
"""
Completely deletes host from GE:
1. Shutdown host execution daemon.
2. Removes host from queue settings.
3. Removes host from host group.
4. Removes host from administrative hosts.
5. Removes host from GE.
:param host: Host to be removed.
:param skip_on_failure: Specifies if the host killing should be continued even if some of
the commands has failed.
"""
pass
def get_host_supplies(self):
pass
def get_host_supply(self, host):
pass
def get_engine_type(self):
pass
def is_valid(self, host):
"""
Validates host in GE checking corresponding execution host availability and its states.
:param host: Host to be checked.
:return: True if execution host is valid.
"""
return True
def kill_jobs(self, jobs, force=False):
"""
Kills jobs in GE.
:param jobs: Grid engine jobs.
:param force: Specifies if this command should be performed with -f flag.
"""
pass
class GridEngineDemandSelector:
def select(self, jobs):
pass
class GridEngineJobValidator:
def validate(self, jobs):
pass | null |
817 | #! /usr/bin/python
#
# Copyright (c) 2017 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import PlotPowerStates as plotter
import argparse
import os
from subprocess import call
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("--statsfile", required=True, help="stats file path")
parser.add_argument(
"--bankutils",
default="b1 b2 b3",
help="target bank " 'utilization values separated by space, e.g. "1 4 8"',
)
parser.add_argument(
"--seqbytes",
default="s1 s2 s3",
help="no. of "
"sequential bytes requested by each traffic gen request."
' e.g. "64 256 512"',
)
parser.add_argument(
"--delays",
default="d1 d2 d3",
help="string of delay" ' values separated by a space. e.g. "1 20 100"',
)
parser.add_argument(
"--outdir", help="directory to output plots", default="plot_test"
)
parser.add_argument("--pdf", action="store_true", help="output Latex and pdf")
def main():
args = parser.parse_args()
if not os.path.isfile(args.statsfile):
exit(f"Error! File not found: {args.statsfile}")
if not os.path.isdir(args.outdir):
os.mkdir(args.outdir)
bank_util_list = args.bankutils.strip().split()
seqbyte_list = args.seqbytes.strip().split()
delays = args.delays.strip().split()
plotter.plotLowPStates(
args.outdir + "/", args.statsfile, bank_util_list, seqbyte_list, delays
)
if args.pdf:
textwidth = "0.5"
### Time and energy plots ###
#############################
# place tex and pdf files in outdir
os.chdir(args.outdir)
texfile_s = "stacked_lowp_sweep.tex"
print("\t", texfile_s)
outfile = open(texfile_s, "w")
startDocText(outfile)
outfile.write("\\begin{figure} \n\\centering\n")
## Time plots for all delay values
for delay in delays:
# Time
filename = plotter.stateTimePlotName(str(delay) + "-")
outfile.write(wrapForGraphic(filename, textwidth))
outfile.write(getCaption(delay))
outfile.write("\end{figure}\n")
# Energy plots for all delay values
outfile.write("\\begin{figure} \n\\centering\n")
for delay in delays:
# Energy
filename = plotter.stateEnergyPlotName(str(delay) + "-")
outfile.write(wrapForGraphic(filename, textwidth))
outfile.write(getCaption(delay))
outfile.write("\\end{figure}\n")
METHOD_NAME(outfile)
outfile.close()
print("\n Generating pdf file")
print("*******************************")
print("\tpdflatex ", texfile_s)
# Run pdflatex to generate to pdf
call(["pdflatex", texfile_s])
call(["open", texfile_s.split(".")[0] + ".pdf"])
def getCaption(delay):
return "\\caption{" + "itt delay = " + str(delay) + "}\n"
def wrapForGraphic(filename, width="1.0"):
# \t is tab and needs to be escaped, therefore \\textwidth
return (
"\\includegraphics[width=" + width + "\\textwidth]{" + filename + "}\n"
)
def startDocText(outfile):
start_stuff = """
\\documentclass[a4paper,landscape,twocolumn]{article}
\\usepackage{graphicx}
\\usepackage[margin=0.5cm]{geometry}
\\begin{document}
"""
outfile.write(start_stuff)
def METHOD_NAME(outfile):
end_stuff = """
\\end{document}
"""
outfile.write(end_stuff)
# Call main
if __name__ == "__main__":
main() | null |
818 | # -*- coding: utf-8 -*-
'''
ubmodule-msg.py: simple response packet logger
Authors: Zdenek Vasicek (vasicek AT fit.vutbr.cz)
Marek Vavrusa (xvavru00 AT stud.fit.vutbr.cz)
Copyright (c) 2008. All rights reserved.
This software is open source.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
import os
def METHOD_NAME(id, cfg):
log_info("pythonmod: init called, module id is %d port: %d script: %s" % (id, cfg.port, cfg.python_script))
return True
def deinit(id):
log_info("pythonmod: deinit called, module id is %d" % id)
return True
def inform_super(id, qstate, superqstate, qdata):
return True
def setTTL(qstate, ttl):
"""Sets return_msg TTL and all the RRs TTL"""
if qstate.return_msg:
qstate.return_msg.rep.ttl = ttl
if (qstate.return_msg.rep):
for i in range(0,qstate.return_msg.rep.rrset_count):
d = qstate.return_msg.rep.rrsets[i].entry.data
for j in range(0,d.count+d.rrsig_count):
d.rr_ttl[j] = ttl
def dataHex(data, prefix=""):
res = ""
for i in range(0, (len(data)+15)/16):
res += "%s0x%02X | " % (prefix, i*16)
d = map(lambda x:ord(x), data[i*16:i*16+17])
for ch in d:
res += "%02X " % ch
for i in range(0,17-len(d)):
res += " "
res += "| "
for ch in d:
if (ch < 32) or (ch > 127):
res += ". "
else:
res += "%c " % ch
res += "\n"
return res
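# Illustrative dataHex output (shape follows the format strings above; the
# padding is abbreviated here): for data="AB" it yields a single row like
#
#   0x00 | 41 42 ...                                          | A B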
def printReturnMsg(qstate):
print "Return MSG rep :: flags: %04X, QDcount: %d, Security:%d, TTL=%d" % (qstate.return_msg.rep.flags, qstate.return_msg.rep.qdcount,qstate.return_msg.rep.security, qstate.return_msg.rep.ttl)
print " qinfo :: qname:",qstate.return_msg.qinfo.qname_list, qstate.return_msg.qinfo.qname_str, "type:",qstate.return_msg.qinfo.qtype_str, "class:",qstate.return_msg.qinfo.qclass_str
if (qstate.return_msg.rep):
print "RRSets:",qstate.return_msg.rep.rrset_count
prevkey = None
for i in range(0,qstate.return_msg.rep.rrset_count):
r = qstate.return_msg.rep.rrsets[i]
rk = r.rk
print i,":",rk.dname_list, rk.dname_str, "flags: %04X" % rk.flags,
print "type:",rk.type_str,"(%d)" % ntohs(rk.type), "class:",rk.rrset_class_str,"(%d)" % ntohs(rk.rrset_class)
d = r.entry.data
print " RRDatas:",d.count+d.rrsig_count
for j in range(0,d.count+d.rrsig_count):
print " ",j,":","TTL=",d.rr_ttl[j],"RR data:"
print dataHex(d.rr_data[j]," ")
def operate(id, event, qstate, qdata):
log_info("pythonmod: operate called, id: %d, event:%s" % (id, strmodulevent(event)))
#print "pythonmod: per query data", qdata
print "Query:", ''.join(map(lambda x:chr(max(32,ord(x))),qstate.qinfo.qname)), qstate.qinfo.qname_list, qstate.qinfo.qname_str,
print "Type:",qstate.qinfo.qtype_str,"(%d)" % qstate.qinfo.qtype,
print "Class:",qstate.qinfo.qclass_str,"(%d)" % qstate.qinfo.qclass
print
    #if event == MODULE_EVENT_PASS: # if the module order is "validator python iterator"
    if (event == MODULE_EVENT_NEW) and (qstate.qinfo.qname_str.endswith(".seznam.cz.")): # if the module order is "python validator iterator"
print qstate.qinfo.qname_str
qstate.ext_state[id] = MODULE_FINISHED
msg = DNSMessage(qstate.qinfo.qname_str, RR_TYPE_A, RR_CLASS_IN, PKT_QR | PKT_RA | PKT_AA) #, 300)
#msg.authority.append("xxx.seznam.cz. 10 IN A 192.168.1.1")
#msg.additional.append("yyy.seznam.cz. 10 IN A 1.1.1.2.")
if qstate.qinfo.qtype == RR_TYPE_A:
msg.answer.append("%s 10 IN A 192.168.1.1" % qstate.qinfo.qname_str)
if (qstate.qinfo.qtype == RR_TYPE_SRV) or (qstate.qinfo.qtype == RR_TYPE_ANY):
msg.answer.append("%s 10 IN SRV 0 0 80 neinfo.example.com." % qstate.qinfo.qname_str)
if (qstate.qinfo.qtype == RR_TYPE_TXT) or (qstate.qinfo.qtype == RR_TYPE_ANY):
msg.answer.append("%s 10 IN TXT path=/" % qstate.qinfo.qname_str)
if not msg.set_return_msg(qstate):
qstate.ext_state[id] = MODULE_ERROR
return True
        #qstate.return_msg.rep.security = 2 # if no validator follows, security must be set so the packet is not dropped in mesh_send_reply
printReturnMsg(qstate)
#Authoritative result can't be stored in cache
#if (not storeQueryInCache(qstate, qstate.return_msg.qinfo, qstate.return_msg.rep, 0)):
# print "Can't store in cache"
# qstate.ext_state[id] = MODULE_ERROR
# return False
#print "Store OK"
qstate.return_rcode = RCODE_NOERROR
return True
if event == MODULE_EVENT_NEW:
qstate.ext_state[id] = MODULE_WAIT_MODULE
return True
if event == MODULE_EVENT_MODDONE:
log_info("pythonmod: previous module done")
qstate.ext_state[id] = MODULE_FINISHED
return True
if event == MODULE_EVENT_PASS:
log_info("pythonmod: event_pass")
qstate.ext_state[id] = MODULE_WAIT_MODULE
return True
log_err("pythonmod: BAD event")
qstate.ext_state[id] = MODULE_ERROR
return True
log_info("pythonmod: script loaded.") | null |
819 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdomain.endpoint import endpoint_data
class QueryAdvancedDomainListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Domain', '2018-01-29', 'QueryAdvancedDomainList')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ProductDomainType(self): # String
return self.get_query_params().get('ProductDomainType')
def set_ProductDomainType(self, ProductDomainType): # String
self.add_query_param('ProductDomainType', ProductDomainType)
def get_PageNum(self): # Integer
return self.get_query_params().get('PageNum')
def set_PageNum(self, PageNum): # Integer
self.add_query_param('PageNum', PageNum)
def get_Excluded(self): # String
return self.get_query_params().get('Excluded')
def set_Excluded(self, Excluded): # String
self.add_query_param('Excluded', Excluded)
def get_StartLength(self): # Integer
return self.get_query_params().get('StartLength')
def set_StartLength(self, StartLength): # Integer
self.add_query_param('StartLength', StartLength)
def get_ExcludedSuffix(self): # Boolean
return self.get_query_params().get('ExcludedSuffix')
def set_ExcludedSuffix(self, ExcludedSuffix): # Boolean
self.add_query_param('ExcludedSuffix', ExcludedSuffix)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_Lang(self): # String
return self.get_query_params().get('Lang')
def METHOD_NAME(self, Lang): # String
self.add_query_param('Lang', Lang)
def get_ExcludedPrefix(self): # Boolean
return self.get_query_params().get('ExcludedPrefix')
def set_ExcludedPrefix(self, ExcludedPrefix): # Boolean
self.add_query_param('ExcludedPrefix', ExcludedPrefix)
def get_KeyWord(self): # String
return self.get_query_params().get('KeyWord')
def set_KeyWord(self, KeyWord): # String
self.add_query_param('KeyWord', KeyWord)
def get_ProductDomainTypeSort(self): # Boolean
return self.get_query_params().get('ProductDomainTypeSort')
def set_ProductDomainTypeSort(self, ProductDomainTypeSort): # Boolean
self.add_query_param('ProductDomainTypeSort', ProductDomainTypeSort)
def get_EndExpirationDate(self): # Long
return self.get_query_params().get('EndExpirationDate')
def set_EndExpirationDate(self, EndExpirationDate): # Long
self.add_query_param('EndExpirationDate', EndExpirationDate)
def get_Suffixs(self): # String
return self.get_query_params().get('Suffixs')
def set_Suffixs(self, Suffixs): # String
self.add_query_param('Suffixs', Suffixs)
def get_DomainNameSort(self): # Boolean
return self.get_query_params().get('DomainNameSort')
def set_DomainNameSort(self, DomainNameSort): # Boolean
self.add_query_param('DomainNameSort', DomainNameSort)
def get_ExpirationDateSort(self): # Boolean
return self.get_query_params().get('ExpirationDateSort')
def set_ExpirationDateSort(self, ExpirationDateSort): # Boolean
self.add_query_param('ExpirationDateSort', ExpirationDateSort)
def get_StartExpirationDate(self): # Long
return self.get_query_params().get('StartExpirationDate')
def set_StartExpirationDate(self, StartExpirationDate): # Long
self.add_query_param('StartExpirationDate', StartExpirationDate)
def get_DomainStatus(self): # Integer
return self.get_query_params().get('DomainStatus')
def set_DomainStatus(self, DomainStatus): # Integer
self.add_query_param('DomainStatus', DomainStatus)
def get_DomainGroupId(self): # Long
return self.get_query_params().get('DomainGroupId')
def set_DomainGroupId(self, DomainGroupId): # Long
self.add_query_param('DomainGroupId', DomainGroupId)
def get_KeyWordSuffix(self): # Boolean
return self.get_query_params().get('KeyWordSuffix')
def set_KeyWordSuffix(self, KeyWordSuffix): # Boolean
self.add_query_param('KeyWordSuffix', KeyWordSuffix)
def get_KeyWordPrefix(self): # Boolean
return self.get_query_params().get('KeyWordPrefix')
def set_KeyWordPrefix(self, KeyWordPrefix): # Boolean
self.add_query_param('KeyWordPrefix', KeyWordPrefix)
def get_TradeType(self): # Integer
return self.get_query_params().get('TradeType')
def set_TradeType(self, TradeType): # Integer
self.add_query_param('TradeType', TradeType)
def get_EndRegistrationDate(self): # Long
return self.get_query_params().get('EndRegistrationDate')
def set_EndRegistrationDate(self, EndRegistrationDate): # Long
self.add_query_param('EndRegistrationDate', EndRegistrationDate)
def get_Form(self): # Integer
return self.get_query_params().get('Form')
def set_Form(self, Form): # Integer
self.add_query_param('Form', Form)
def get_UserClientIp(self): # String
return self.get_query_params().get('UserClientIp')
def set_UserClientIp(self, UserClientIp): # String
self.add_query_param('UserClientIp', UserClientIp)
def get_RegistrationDateSort(self): # Boolean
return self.get_query_params().get('RegistrationDateSort')
def set_RegistrationDateSort(self, RegistrationDateSort): # Boolean
self.add_query_param('RegistrationDateSort', RegistrationDateSort)
def get_StartRegistrationDate(self): # Long
return self.get_query_params().get('StartRegistrationDate')
def set_StartRegistrationDate(self, StartRegistrationDate): # Long
self.add_query_param('StartRegistrationDate', StartRegistrationDate)
def get_EndLength(self): # Integer
return self.get_query_params().get('EndLength')
def set_EndLength(self, EndLength): # Integer
self.add_query_param('EndLength', EndLength) | null |
820 | import os.path
import shutil
import subprocess
import time
from concurrent.futures import ThreadPoolExecutor
from os import path
from subprocess import Popen, PIPE
from selectors import EVENT_READ, DefaultSelector
from threading import Lock
import pkg_resources
import yaml
from simulator.log import Level, error, log
module_dir = os.path.dirname(pkg_resources.resource_filename(__name__, '__init__.py'))
simulator_home = os.environ.get('SIMULATOR_HOME')
bin_dir = os.path.join(simulator_home, "bin")
class AtomicLong:
def __init__(self, value=0):
self.lock = Lock()
self.value = value
def get(self):
with self.lock:
return self.value
def set(self, value):
with self.lock:
self.value = value
def inc(self, amount=1):
with self.lock:
self.value += amount
def METHOD_NAME(file):
with open(file, 'r') as f:
return f.METHOD_NAME()
def write(file, text):
with open(file, 'w') as f:
return f.write(text)
def write_yaml(file, content):
with open(file, 'w') as f:
yaml.dump(content, f)
def now_seconds():
return round(time.time())
def remove(file):
if not path.exists(file):
return
if path.isfile(file):
os.remove(file)
else:
shutil.rmtree(file)
def validate_dir(path):
path = os.path.expanduser(path)
if not os.path.exists(path):
print(f"Directory [{path}] does not exist")
exit(1)
if not os.path.isdir(f"{path}/"):
print(f"Directory [{path}] is not a directory")
exit(1)
return path
def validate_git_dir(path):
path = validate_dir(path)
if not path.endswith("/.git"):
corrected_path = f"{path}/.git"
return validate_git_dir(corrected_path)
if not os.path.exists(f"{path}/refs"):
print(f"Directory [{path}] is not valid git directory")
exit(1)
return path
def mkdir(path):
path = os.path.expanduser(path)
if os.path.isdir(path):
return path
if os.path.exists(path):
exit_with_error(f"Can't create directory [{path}], file with the same name already exists.")
os.makedirs(path)
return path
def dump(obj):
for attr in dir(obj):
print(("obj.%s = %s" % (attr, getattr(obj, attr))))
def load_yaml_file(path):
if not os.path.exists(path):
exit_with_error(f"Could not find file [{path}]")
with open(path) as f:
return yaml.load(f, Loader=yaml.FullLoader)
def run_parallel(target, args_list, max_workers=8):
results = []
with ThreadPoolExecutor(max_workers=max_workers) as executor:
futures = []
for args in args_list:
futures.append(executor.submit(target, *args))
for f in futures:
results.append(f.result())
return results
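# Illustrative usage sketch (the worker function `ping` is hypothetical);
# each entry of args_list is a tuple of positional arguments for the target:
#
#   def ping(host):
#       return shell("ping -c 1 %s" % host, use_print=False)
#
#   results = run_parallel(ping, [("10.0.0.1",), ("10.0.0.2",)], max_workers=2)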
def exit_with_error(text):
error(text)
exit(1)
def shell_logged(cmd, log_file_path, exit_on_error=False):
return_code = shell(cmd, shell=True, use_print=False, log_file_path=log_file_path)
if return_code != 0 and exit_on_error:
print(f"Failed to run [{cmd}], exitcode: {return_code}. Check {log_file_path} for details.")
exit(1)
return return_code
def shell(cmd, shell=True, use_print=False, log_file_path=None):
process = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=shell)
selector = DefaultSelector()
selector.register(process.stdout, EVENT_READ)
selector.register(process.stderr, EVENT_READ)
if log_file_path:
with open(log_file_path, "a") as f:
return read_loop(process, selector, use_print, file=f)
else:
return read_loop(process, selector, use_print)
def read_loop(process, selector, use_print, file=None):
while True:
for key, _ in selector.select():
data = key.fileobj.read1().decode()
if not data:
return process.wait()
if use_print:
log_level = Level.print
else:
log_level = Level.info if key.fileobj is process.stdout else Level.warn
log(data, log_level, file=file)
def __parse_tag(s):
items = s.split('=')
key = items[0].strip()
value = None
if len(items) > 1:
# rejoin the rest:
value = '='.join(items[1:])
return (key, value)
def parse_tags(items):
d = {}
if items:
flat_list = [item for sublist in items for item in sublist]
for item in flat_list:
key, value = __parse_tag(item)
d[key] = value
return d | null |
821 | """OTX Core Data Adapter."""
# Copyright (C) 2022 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
# pylint: disable=too-many-return-statements, too-many-arguments
import importlib
import os
from otx.algorithms.common.configs.training_base import TrainType
from otx.api.entities.model_template import TaskType
ADAPTERS = {
TaskType.CLASSIFICATION: {
"Incremental": {
"module_name": "classification_dataset_adapter",
"class": "ClassificationDatasetAdapter",
},
"Selfsupervised": {
"module_name": "classification_dataset_adapter",
"class": "SelfSLClassificationDatasetAdapter",
},
},
TaskType.DETECTION: {
"Incremental": {
"module_name": "detection_dataset_adapter",
"class": "DetectionDatasetAdapter",
}
},
TaskType.ROTATED_DETECTION: {
"Incremental": {
"module_name": "detection_dataset_adapter",
"class": "DetectionDatasetAdapter",
}
},
TaskType.INSTANCE_SEGMENTATION: {
"Incremental": {
"module_name": "detection_dataset_adapter",
"class": "DetectionDatasetAdapter",
}
},
TaskType.SEGMENTATION: {
"Incremental": {
"module_name": "segmentation_dataset_adapter",
"class": "SegmentationDatasetAdapter",
},
"Selfsupervised": {
"module_name": "segmentation_dataset_adapter",
"class": "SelfSLSegmentationDatasetAdapter",
},
},
TaskType.ANOMALY_CLASSIFICATION: {
"Incremental": {
"module_name": "anomaly_dataset_adapter",
"class": "AnomalyClassificationDatasetAdapter",
}
},
TaskType.ANOMALY_DETECTION: {
"Incremental": {
"module_name": "anomaly_dataset_adapter",
"class": "AnomalyDetectionDatasetAdapter",
}
},
TaskType.ANOMALY_SEGMENTATION: {
"Incremental": {
"module_name": "anomaly_dataset_adapter",
"class": "AnomalySegmentationDatasetAdapter",
}
},
}
if os.getenv("FEATURE_FLAGS_OTX_ACTION_TASKS", "0") == "1":
ADAPTERS.update(
{
TaskType.ACTION_CLASSIFICATION: {
"Incremental": {
"module_name": "action_dataset_adapter",
"class": "ActionClassificationDatasetAdapter",
}
},
TaskType.ACTION_DETECTION: {
"Incremental": {
"module_name": "action_dataset_adapter",
"class": "ActionDetectionDatasetAdapter",
}
},
}
)
# TODO: update to real template
if os.getenv("FEATURE_FLAGS_OTX_VISUAL_PROMPTING_TASKS", "0") == "1":
ADAPTERS.update(
{
TaskType.VISUAL_PROMPTING: {
"Incremental": {
"module_name": "visual_prompting_dataset_adapter",
"class": "VisualPromptingDatasetAdapter",
}
},
}
)
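# Example lookup into the registry above (taken directly from ADAPTERS):
# ADAPTERS[TaskType.DETECTION]["Incremental"] yields
# {"module_name": "detection_dataset_adapter", "class": "DetectionDatasetAdapter"},
# which the factory below resolves to
# otx.core.data.adapter.detection_dataset_adapter.DetectionDatasetAdapter.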
def METHOD_NAME(
task_type: TaskType,
train_type: TrainType,
train_data_roots: str = None,
train_ann_files: str = None,
val_data_roots: str = None,
val_ann_files: str = None,
test_data_roots: str = None,
test_ann_files: str = None,
unlabeled_data_roots: str = None,
unlabeled_file_list: str = None,
**kwargs,
):
"""Returns a dataset class by task type.
Args:
        task_type: A task type such as ANOMALY_CLASSIFICATION, ANOMALY_DETECTION, ANOMALY_SEGMENTATION,
            CLASSIFICATION, DETECTION, INSTANCE_SEGMENTATION, ROTATED_DETECTION, SEGMENTATION.
        train_type: train type such as Incremental and Selfsupervised.
            Selfsupervised is only supported for CLASSIFICATION and SEGMENTATION.
train_data_roots: the path of data root for training data
train_ann_files: the path of annotation file for training data
val_data_roots: the path of data root for validation data
val_ann_files: the path of annotation file for validation data
test_data_roots: the path of data root for test data
test_ann_files: the path of annotation file for test data
unlabeled_data_roots: the path of data root for unlabeled data
unlabeled_file_list: the path of unlabeled file list
kwargs: optional kwargs
"""
train_type_to_be_called = str(
train_type if train_type == TrainType.Selfsupervised.value else TrainType.Incremental.value
)
module_root = "otx.core.data.adapter."
module = importlib.import_module(module_root + ADAPTERS[task_type][train_type_to_be_called]["module_name"])
return getattr(module, ADAPTERS[task_type][train_type_to_be_called]["class"])(
task_type=task_type,
train_data_roots=train_data_roots,
train_ann_files=train_ann_files,
val_data_roots=val_data_roots,
val_ann_files=val_ann_files,
test_data_roots=test_data_roots,
test_ann_files=test_ann_files,
unlabeled_data_roots=unlabeled_data_roots,
unlabeled_file_list=unlabeled_file_list,
**kwargs,
) | null |
822 | import IMP.test
import IMP.pmi
import pickle
def _parse_restraint_info(info):
"""Convert RestraintInfo object to Python dict"""
d = {}
if info is None:
return d
info.set_was_used(True)
for typ in ('int', 'float', 'string', 'filename', 'floats', 'filenames',
'particle_indexes'):
for i in range(getattr(info, 'get_number_of_' + typ)()):
key = getattr(info, 'get_%s_key' % typ)(i)
value = getattr(info, 'get_%s_value' % typ)(i)
d[key] = value
return d
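# Illustrative result (values taken from the tests below): for a restraint
# with set_metadata('test_fn', 25.0, 0.1) this returns roughly
#
#   {'type': 'IMP.pmi.CrossLinkingMassSpectrometryRestraint',
#    'filename': 'test_fn', 'linker length': 25.0, 'slope': 0.1}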
class Tests(IMP.test.TestCase):
def METHOD_NAME(self):
"""Test metadata without linker info"""
m = IMP.Model()
r = IMP.pmi.CrossLinkRestraintSet(m, "foo")
r.set_metadata('test_fn', 25.0, 0.1)
info = _parse_restraint_info(r.get_static_info())
self.assertEqual(len(info.keys()), 4)
self.assertAlmostEqual(info['linker length'], 25.0, delta=1e-3)
self.assertAlmostEqual(info['slope'], 0.1, delta=1e-3)
self.assertEqual(info['filename'], 'test_fn')
self.assertEqual(
info['type'], 'IMP.pmi.CrossLinkingMassSpectrometryRestraint')
def test_metadata_linker_smiles(self):
"""Test metadata with linker containing just SMILES info"""
m = IMP.Model()
r = IMP.pmi.CrossLinkRestraintSet(m, "foo")
r.set_metadata('test_fn', 25.0, 0.1)
r.set_linker_auth_name('DSS')
r.set_linker_smiles('CC')
info = _parse_restraint_info(r.get_static_info())
self.assertEqual(
sorted(info.keys()),
['filename', 'linker author name', 'linker length',
'linker smiles', 'slope', 'type'])
self.assertEqual(info['linker author name'], 'DSS')
self.assertEqual(info['linker smiles'], 'CC')
def test_metadata_linker_full(self):
"""Test metadata with linker containing full info"""
m = IMP.Model()
r = IMP.pmi.CrossLinkRestraintSet(m, "foo")
r.set_metadata('test_fn', 25.0, 0.1)
r.set_linker_auth_name('DSS')
r.set_linker_chemical_name('chem')
r.set_linker_smiles('CC')
r.set_linker_smiles_canonical('CC2')
r.set_linker_inchi('testinchi')
r.set_linker_inchi_key('testinchikey')
info = _parse_restraint_info(r.get_static_info())
self.assertEqual(
sorted(info.keys()),
['filename', 'linker author name', 'linker chemical name',
'linker inchi', 'linker inchi key', 'linker length',
'linker smiles', 'linker smiles canonical', 'slope', 'type'])
self.assertEqual(info['linker author name'], 'DSS')
self.assertEqual(info['linker chemical name'], 'chem')
self.assertEqual(info['linker smiles'], 'CC')
self.assertEqual(info['linker smiles canonical'], 'CC2')
self.assertEqual(info['linker inchi'], 'testinchi')
self.assertEqual(info['linker inchi key'], 'testinchikey')
def test_pickle(self):
"""Test (un-)pickle of CrossLinkRestraintSet"""
m = IMP.Model()
r = IMP.pmi.CrossLinkRestraintSet(m, "foo")
r.set_metadata('test_fn', 25.0, 0.1)
dump = pickle.dumps(r)
newrsr = pickle.loads(dump)
self.assertEqual(newrsr.get_name(), "foo")
info = _parse_restraint_info(newrsr.get_static_info())
self.assertAlmostEqual(info['linker length'], 25.0, delta=1e-3)
self.assertEqual(info['filename'], 'test_fn')
def test_pickle_polymorphic(self):
"""Test (un-)pickle of CrossLinkRestraintSet via polymorphic pointer"""
m = IMP.Model()
r = IMP.pmi.CrossLinkRestraintSet(m, "foo")
r.set_metadata('test_fn', 25.0, 0.1)
sf = IMP.core.RestraintsScoringFunction([r])
dump = pickle.dumps(sf)
newsf = pickle.loads(dump)
newrsr, = newsf.restraints
self.assertEqual(newrsr.get_name(), "foo")
info = _parse_restraint_info(newrsr.get_static_info())
self.assertAlmostEqual(info['linker length'], 25.0, delta=1e-3)
self.assertEqual(info['filename'], 'test_fn')
if __name__ == '__main__':
IMP.test.main() | null |
823 | """Tests the xonsh.procs.specs"""
import itertools
import sys
from subprocess import Popen
import pytest
from xonsh.procs.posix import PopenThread
from xonsh.procs.proxies import STDOUT_DISPATCHER, ProcProxy, ProcProxyThread
from xonsh.procs.specs import SubprocSpec, cmds_to_specs, run_subproc
from xonsh.pytest.tools import skip_if_on_windows
from xonsh.tools import XonshError
@skip_if_on_windows
def METHOD_NAME(xession):
env = xession.env
cmds = [["pwd"]]
# XONSH_CAPTURE_ALWAYS=False should disable interactive threaded subprocs
env["XONSH_CAPTURE_ALWAYS"] = False
env["THREAD_SUBPROCS"] = True
specs = cmds_to_specs(cmds, captured="hiddenobject")
assert specs[0].cls is Popen
# Now for the other situations
env["XONSH_CAPTURE_ALWAYS"] = True
# First check that threadable subprocs become threadable
env["THREAD_SUBPROCS"] = True
specs = cmds_to_specs(cmds, captured="hiddenobject")
assert specs[0].cls is PopenThread
# turn off threading and check we use Popen
env["THREAD_SUBPROCS"] = False
specs = cmds_to_specs(cmds, captured="hiddenobject")
assert specs[0].cls is Popen
# now check the threadbility of callable aliases
cmds = [[lambda: "Keras Selyrian"]]
# check that threadable alias become threadable
env["THREAD_SUBPROCS"] = True
specs = cmds_to_specs(cmds, captured="hiddenobject")
assert specs[0].cls is ProcProxyThread
# turn off threading and check we use ProcProxy
env["THREAD_SUBPROCS"] = False
specs = cmds_to_specs(cmds, captured="hiddenobject")
assert specs[0].cls is ProcProxy
@pytest.mark.parametrize("thread_subprocs", [True, False])
def test_cmds_to_specs_capture_stdout_not_stderr(thread_subprocs, xonsh_session):
env = xonsh_session.env
cmds = (["ls", "/root"],)
env["THREAD_SUBPROCS"] = thread_subprocs
specs = cmds_to_specs(cmds, captured="stdout")
assert specs[0].stdout is not None
assert specs[0].stderr is None
@skip_if_on_windows
@pytest.mark.parametrize("pipe", (True, False))
@pytest.mark.parametrize("alias_type", (None, "func", "exec", "simple"))
@pytest.mark.parametrize(
"thread_subprocs, capture_always", list(itertools.product((True, False), repeat=2))
)
@pytest.mark.flaky(reruns=5, reruns_delay=2)
def test_capture_always(
capfd, thread_subprocs, capture_always, alias_type, pipe, monkeypatch, xonsh_session
):
if not thread_subprocs and alias_type in ["func", "exec"]:
if pipe:
return pytest.skip("https://github.com/xonsh/xonsh/issues/4443")
else:
return pytest.skip("https://github.com/xonsh/xonsh/issues/4444")
env = xonsh_session.env
exp = "HELLO\nBYE\n"
cmds = [["echo", "-n", exp]]
if pipe:
exp = exp.splitlines()[1] + "\n" # second line
cmds += ["|", ["grep", "--color=never", exp.strip()]]
if alias_type:
first_cmd = cmds[0]
# Enable capfd for function aliases:
monkeypatch.setattr(STDOUT_DISPATCHER, "default", sys.stdout)
if alias_type == "func":
xonsh_session.aliases["tst"] = (
lambda: run_subproc([first_cmd], "hiddenobject") and None
) # Don't return a value
elif alias_type == "exec":
first_cmd = " ".join(repr(arg) for arg in first_cmd)
xonsh_session.aliases["tst"] = f"![{first_cmd}]"
else:
# alias_type == "simple"
xonsh_session.aliases["tst"] = first_cmd
cmds[0] = ["tst"]
env["THREAD_SUBPROCS"] = thread_subprocs
env["XONSH_CAPTURE_ALWAYS"] = capture_always
hidden = run_subproc(cmds, "hiddenobject") # ![]
# Check that interactive subprocs are always printed
assert exp in capfd.readouterr().out
if capture_always and thread_subprocs:
# Check that the interactive output was captured
assert hidden.out == exp
else:
# without THREAD_SUBPROCS capturing in ![] isn't possible
assert not hidden.out
# Explicitly captured commands are always captured
hidden = run_subproc(cmds, "object") # !()
hidden.end()
if thread_subprocs:
assert exp not in capfd.readouterr().out
assert hidden.out == exp
else:
# for some reason THREAD_SUBPROCS=False fails to capture in `!()` but still succeeds in `$()`
assert exp in capfd.readouterr().out
assert not hidden.out
output = run_subproc(cmds, "stdout") # $()
assert exp not in capfd.readouterr().out
assert output == exp
# Explicitly non-captured commands are never captured (/always printed)
run_subproc(cmds, captured=False) # $[]
assert exp in capfd.readouterr().out
@skip_if_on_windows
@pytest.mark.parametrize(
"captured, exp_is_none",
[
("object", False),
("stdout", True),
("hiddenobject", False),
(False, True),
],
)
def test_run_subproc_background(captured, exp_is_none):
cmds = (["echo", "hello"], "&")
return_val = run_subproc(cmds, captured)
assert (return_val is None) == exp_is_none
@pytest.mark.parametrize("thread_subprocs", [False, True])
def test_callable_alias_cls(thread_subprocs, xession):
class Cls:
def __call__(self, *args, **kwargs):
print(args, kwargs)
obj = Cls()
xession.aliases["tst"] = obj
env = xession.env
cmds = (["tst", "/root"],)
env["THREAD_SUBPROCS"] = thread_subprocs
spec = cmds_to_specs(cmds, captured="stdout")[0]
proc = spec.run()
assert proc.f == obj
@pytest.mark.parametrize("captured", ["hiddenobject", False])
def test_procproxy_not_captured(xession, captured):
xession.aliases["tst"] = lambda: 0
cmds = (["tst", "/root"],)
xession.env["THREAD_SUBPROCS"] = False
specs = cmds_to_specs(cmds, captured)
assert specs[0].cls is ProcProxy
# neither stdout nor stderr should be captured
assert specs[0].stdout is None
assert specs[0].stderr is None
def test_on_command_not_found_fires(xession):
xession.env.update(
dict(
XONSH_INTERACTIVE=True,
)
)
fired = False
def my_handler(cmd, **kwargs):
nonlocal fired
assert cmd[0] == "xonshcommandnotfound"
fired = True
xession.builtins.events.on_command_not_found(my_handler)
subproc = SubprocSpec.build(["xonshcommandnotfound"])
with pytest.raises(XonshError) as expected:
subproc.run()
assert "command not found: 'xonshcommandnotfound'" in str(expected.value)
assert fired
def test_on_command_not_found_doesnt_fire_in_non_interactive_mode(xession):
xession.env.update(
dict(
XONSH_INTERACTIVE=False,
)
)
fired = False
def my_handler(cmd, **kwargs):
nonlocal fired
assert cmd[0] == "xonshcommandnotfound"
fired = True
xession.builtins.events.on_command_not_found(my_handler)
subproc = SubprocSpec.build(["xonshcommandnotfound"])
with pytest.raises(XonshError) as expected:
subproc.run()
assert "command not found: 'xonshcommandnotfound'" in str(expected.value)
assert not fired | null |
824 | from typing import Dict
from boa3.internal.model.callable import Callable
from boa3.internal.model.method import Method
from boa3.internal.model.symbol import ISymbol
from boa3.internal.model.type.classes.classtype import ClassType
from boa3.internal.model.variable import Variable
class Module(ISymbol):
"""
A class used to represent a Python module
:ivar variables: a dictionary that maps each variable with its name. Empty by default.
:ivar methods: a dictionary that maps each method with its name. Empty by default.
:ivar callables: a dictionary that maps each callable object with its name. Empty by default.
:ivar classes: a dictionary that maps each class with its name. Empty by default.
:ivar imported_symbols: a dictionary that maps each imported symbol with its name. Empty by default.
"""
def __init__(self, variables: Dict[str, Variable] = None, methods: Dict[str, Method] = None):
if variables is None:
variables = {}
self.variables = variables
if methods is None:
methods = {}
self.methods = methods
self.callables: Dict[str, Callable] = {}
self.classes: Dict[str, ClassType] = {}
self.defined_by_entry = True
self.imported_symbols = {}
self.assigned_variables = []
@property
def shadowing_name(self) -> str:
return 'module'
def METHOD_NAME(self, var_id: str, var: Variable):
"""
Includes a variable into the scope of the module
:param var_id: variable identifier
:param var: variable to be included
"""
if var_id not in self.symbols:
self.variables[var_id] = var
def is_variable_assigned(self, var_id: str) -> bool:
if var_id not in self.variables:
return False
if var_id in self.assigned_variables or var_id in self.imported_symbols:
return True
for imported in self.imported_symbols.values():
from boa3.internal.model.imports.importsymbol import Import
if isinstance(imported, Import) and self.variables[var_id] in imported.variables.values():
return True
return False
def assign_variable(self, var_id: str):
if var_id in self.variables:
self.assigned_variables.append(var_id)
def include_callable(self, method_id: str, method: Callable) -> bool:
"""
        Includes a callable (method or other callable) into the scope of the module
        :param method_id: callable identifier
        :param method: callable to be included
"""
if method_id not in self.symbols:
if isinstance(method, Method):
self.methods[method_id] = method
else:
self.callables[method_id] = method
return True
return False
def include_class(self, class_id: str, class_obj: ClassType):
"""
Includes a class into the scope of the module
:param class_id: class identifier
:param class_obj: class object to be included
"""
if class_id not in self.symbols:
self.classes[class_id] = class_obj
def include_symbol(self, symbol_id: str, symbol: ISymbol):
"""
        Includes a symbol into the scope of the module
        :param symbol_id: symbol identifier
        :param symbol: symbol to be included
"""
if symbol_id not in self.symbols:
if isinstance(symbol, Variable):
self.METHOD_NAME(symbol_id, symbol)
elif isinstance(symbol, Callable):
self.include_callable(symbol_id, symbol)
elif isinstance(symbol, ClassType):
self.include_class(symbol_id, symbol)
else:
self.imported_symbols[symbol_id] = symbol
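    # Illustrative dispatch (names hypothetical): include_symbol routes by
    # type, so include_symbol('x', some_variable) lands in self.variables,
    # while an ISymbol of no known kind is kept in imported_symbols; either
    # way it becomes visible through the combined `symbols` mapping below.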
@property
def symbols(self) -> Dict[str, ISymbol]:
"""
Gets all the symbols in the module
:return: a dictionary that maps each symbol in the module with its name
"""
symbols = {}
symbols.update(self.imported_symbols)
symbols.update(self.variables)
symbols.update(self.methods)
symbols.update(self.callables)
symbols.update(self.classes)
return symbols | null |
825 | import numpy as np
import pytest
from PIL import Image
from otx.algorithms.classification.adapters.mmcls.datasets.pipelines.otx_pipelines import (
GaussianBlur,
LoadImageFromOTXDataset,
LoadResizeDataFromOTXDataset,
    OTXColorJitter,
    PILImageToNDArray,
    PostAug,
    RandomAppliedTrans,
    ResizeTo,
)
from otx.core.data.caching import MemCacheHandlerSingleton
from tests.test_suite.e2e_test_system import e2e_pytest_unit
from .test_datasets import create_cls_dataset
@pytest.fixture(scope="module")
def inputs_np():
return {"img": np.random.randint(0, 10, (16, 16, 3), dtype=np.uint8), "img_fields": ["img"]}
@pytest.fixture(scope="module")
def inputs_PIL():
return {
"img": Image.fromarray(np.random.randint(0, 10, (16, 16, 3), dtype=np.uint8)),
}
@e2e_pytest_unit
@pytest.mark.parametrize("to_float32", [False, True])
def test_load_image_from_otx_dataset_call(to_float32):
"""Test LoadImageFromOTXDataset."""
otx_dataset, labels = create_cls_dataset()
load_image_from_otx_dataset = LoadImageFromOTXDataset(to_float32)
results = dict(
dataset_item=otx_dataset[0],
width=otx_dataset[0].width,
height=otx_dataset[0].height,
index=0,
ann_info=dict(label_list=labels),
)
results = load_image_from_otx_dataset(results)
assert "filename" in results
assert "ori_filename" in results
assert "img" in results
assert "img_shape" in results
assert "ori_shape" in results
assert "pad_shape" in results
assert "img_norm_cfg" in results
assert "img_fields" in results
assert isinstance(results["img"], np.ndarray)
@e2e_pytest_unit
def test_load_resize_data_from_otx_dataset_call(mocker):
"""Test LoadResizeDataFromOTXDataset."""
otx_dataset, labels = create_cls_dataset()
MemCacheHandlerSingleton.create("singleprocessing", otx_dataset[0].numpy.size)
op = LoadResizeDataFromOTXDataset(
resize_cfg=dict(type="Resize", size=(4, 4)), # 8x8 -> 4x4
)
src_dict = dict(
dataset_item=otx_dataset[0],
width=otx_dataset[0].width,
height=otx_dataset[0].height,
index=0,
ann_info=dict(label_list=labels),
)
dst_dict = op(src_dict)
assert dst_dict["ori_shape"][0] == 8
assert dst_dict["img_shape"][0] == 4
assert dst_dict["img"].shape == dst_dict["img_shape"]
op._load_img_op = mocker.MagicMock()
dst_dict_from_cache = op(src_dict)
assert op._load_img_op.call_count == 0 # _load_img() should not be called
assert np.array_equal(dst_dict["img"], dst_dict_from_cache["img"])
assert dst_dict["ann_info"] == dst_dict_from_cache["ann_info"]
@e2e_pytest_unit
def test_load_resize_data_from_otx_dataset_downscale_only(mocker):
"""Test LoadResizeDataFromOTXDataset."""
otx_dataset, labels = create_cls_dataset()
MemCacheHandlerSingleton.create("singleprocessing", otx_dataset[0].numpy.size)
op = LoadResizeDataFromOTXDataset(
resize_cfg=dict(type="Resize", size=(12, 12), downscale_only=True), # 8x8 -> 12x12
)
src_dict = dict(
dataset_item=otx_dataset[0],
width=otx_dataset[0].width,
height=otx_dataset[0].height,
index=0,
ann_info=dict(label_list=labels),
)
dst_dict = op(src_dict)
assert dst_dict["ori_shape"][0] == 8
assert dst_dict["img_shape"][0] == 8 # Skipped upscale
assert dst_dict["img"].shape == dst_dict["img_shape"]
op._load_img_op = mocker.MagicMock()
dst_dict_from_cache = op(src_dict)
assert op._load_img_op.call_count == 0 # _load_img() should not be called
assert np.array_equal(dst_dict["img"], dst_dict_from_cache["img"])
assert dst_dict["ann_info"] == dst_dict_from_cache["ann_info"]
@e2e_pytest_unit
def test_resize_to(mocker, inputs_np):
"""Test LoadResizeDataFromOTXDataset."""
otx_dataset, labels = create_cls_dataset()
src_dict = dict(
**inputs_np,
ori_shape=(16, 16),
img_shape=(16, 16),
)
# Test downscale
op = ResizeTo(size=(4, 4))
dst_dict = op(src_dict)
assert dst_dict["ori_shape"][0] == 16
assert dst_dict["img_shape"][0] == 4
assert dst_dict["img"].shape == dst_dict["img_shape"]
# Test upscale from output
op = ResizeTo(size=(8, 8))
dst_dict = op(dst_dict)
assert dst_dict["ori_shape"][0] == 16
assert dst_dict["img_shape"][0] == 8
assert dst_dict["img"].shape == dst_dict["img_shape"]
# Test same size from output
op = ResizeTo(size=(8, 8))
op._resize_img = mocker.MagicMock()
dst_dict = op(dst_dict)
assert dst_dict["ori_shape"][0] == 16
assert dst_dict["img_shape"][0] == 8
assert op._resize_img.call_count == 0 # _resize_img() should not be called
@e2e_pytest_unit
def test_random_applied_transforms(mocker, inputs_np):
"""Test RandomAppliedTrans."""
mocker.patch(
"otx.algorithms.classification.adapters.mmcls.datasets.pipelines.otx_pipelines.build_from_cfg",
return_value=lambda x: x,
)
random_applied_transforms = RandomAppliedTrans(transforms=[dict()])
results = random_applied_transforms(inputs_np)
assert isinstance(results, dict)
assert "img" in results
assert repr(random_applied_transforms) == "RandomAppliedTrans"
@e2e_pytest_unit
def test_otx_color_jitter(inputs_np):
"""Test OTXColorJitter."""
otx_color_jitter = OTXColorJitter()
results = otx_color_jitter(inputs_np)
assert isinstance(results, dict)
assert "img" in results
@e2e_pytest_unit
def test_gaussian_blur(inputs_np):
"""Test GaussianBlur."""
gaussian_blur = GaussianBlur(sigma_min=0.1, sigma_max=0.2)
results = gaussian_blur(inputs_np)
assert isinstance(results, dict)
assert "img" in results
assert repr(gaussian_blur) == "GaussianBlur"
@e2e_pytest_unit
def test_pil_image_to_nd_array(inputs_PIL) -> None:
"""Test PILImageToNDArray."""
pil_image_to_nd_array = PILImageToNDArray(keys=["img"])
results = pil_image_to_nd_array(inputs_PIL)
assert "img" in results
assert isinstance(results["img"], np.ndarray)
assert repr(pil_image_to_nd_array) == "PILImageToNDArray"
@e2e_pytest_unit
def METHOD_NAME(mocker, inputs_np):
"""Test PostAug."""
mocker.patch(
"otx.algorithms.classification.adapters.mmcls.datasets.pipelines.otx_pipelines.Compose",
return_value=lambda x: x,
)
post_aug = PostAug(keys=dict(orig=lambda x: x))
results = post_aug(inputs_np)
assert isinstance(results, dict)
assert "img" in results and "img" in results["img_fields"]
assert "orig" in results and "orig" in results["img_fields"]
assert repr(post_aug) == "PostAug" | null |
826 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class ListTransitRouterMulticastGroupsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'ListTransitRouterMulticastGroups')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_NetworkInterfaceIdss(self): # RepeatList
return self.get_query_params().get('NetworkInterfaceIds')
def set_NetworkInterfaceIdss(self, NetworkInterfaceIds): # RepeatList
for depth1 in range(len(NetworkInterfaceIds)):
self.add_query_param('NetworkInterfaceIds.' + str(depth1 + 1), NetworkInterfaceIds[depth1])
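	# Note: RepeatList parameters are flattened into 1-based indexed query
	# keys, e.g. setting ['eni-a', 'eni-b'] here produces
	# NetworkInterfaceIds.1=eni-a and NetworkInterfaceIds.2=eni-b.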
def get_VSwitchIdss(self): # RepeatList
return self.get_query_params().get('VSwitchIds')
def set_VSwitchIdss(self, VSwitchIds): # RepeatList
for depth1 in range(len(VSwitchIds)):
self.add_query_param('VSwitchIds.' + str(depth1 + 1), VSwitchIds[depth1])
def get_TransitRouterMulticastDomainId(self): # String
return self.get_query_params().get('TransitRouterMulticastDomainId')
def set_TransitRouterMulticastDomainId(self, TransitRouterMulticastDomainId): # String
self.add_query_param('TransitRouterMulticastDomainId', TransitRouterMulticastDomainId)
def get_IsGroupSource(self): # Boolean
return self.get_query_params().get('IsGroupSource')
def set_IsGroupSource(self, IsGroupSource): # Boolean
self.add_query_param('IsGroupSource', IsGroupSource)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_GroupIpAddress(self): # String
return self.get_query_params().get('GroupIpAddress')
def set_GroupIpAddress(self, GroupIpAddress): # String
self.add_query_param('GroupIpAddress', GroupIpAddress)
def get_ResourceId(self): # String
return self.get_query_params().get('ResourceId')
def set_ResourceId(self, ResourceId): # String
self.add_query_param('ResourceId', ResourceId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_PeerTransitRouterMulticastDomainss(self): # RepeatList
return self.get_query_params().get('PeerTransitRouterMulticastDomains')
def set_PeerTransitRouterMulticastDomainss(self, PeerTransitRouterMulticastDomains): # RepeatList
for depth1 in range(len(PeerTransitRouterMulticastDomains)):
self.add_query_param('PeerTransitRouterMulticastDomains.' + str(depth1 + 1), PeerTransitRouterMulticastDomains[depth1])
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ResourceType(self): # String
return self.get_query_params().get('ResourceType')
def set_ResourceType(self, ResourceType): # String
self.add_query_param('ResourceType', ResourceType)
def get_TransitRouterAttachmentId(self): # String
return self.get_query_params().get('TransitRouterAttachmentId')
def set_TransitRouterAttachmentId(self, TransitRouterAttachmentId): # String
self.add_query_param('TransitRouterAttachmentId', TransitRouterAttachmentId)
def get_MaxResults(self): # Long
return self.get_query_params().get('MaxResults')
def set_MaxResults(self, MaxResults): # Long
self.add_query_param('MaxResults', MaxResults)
def METHOD_NAME(self): # Boolean
return self.get_query_params().get('IsGroupMember')
def set_IsGroupMember(self, IsGroupMember): # Boolean
self.add_query_param('IsGroupMember', IsGroupMember) | null |
827 | # Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import numpy as np
import networkx as nx
import pandas as pd
import os
import tarfile
from urllib.request import urlopen
from sklearn.preprocessing import LabelEncoder
sys.path.append("../../utils/")
from neu.safe_extract import safe_extract
CORA_URL = "https://linqs-data.soe.ucsc.edu/public/lbc/cora.tgz"
def download_cora(url=CORA_URL):
'''
Download cora dataset.
'''
if not os.path.exists('./cora.tgz'):
print('Downloading cora dataset...')
data = urlopen(url).read()
with open('./cora.tgz', 'wb') as f:
f.write(data)
else:
print('Cora dataset already exists.')
if not os.path.exists('./cora'):
print('Extracting cora dataset...')
with tarfile.open('./cora.tgz', 'r') as f:
safe_extract(f, './')
else:
print('Cora dataset is already extracted.')
def load_cora(shuffle=True):
"""
Download and load cora dataset.
    Return the NetworkX graph, feature matrix and numerical labels.
"""
download_cora()
edge_df = pd.read_csv(os.path.join('./cora', 'cora.cites'),
sep='\t', header=None, names=["target", "source"])
if shuffle:
edge_df = edge_df.sample(frac=1)
G = nx.from_pandas_edgelist(edge_df)
feature_names = ['word_{}'.format(i) for i in range(1433)]
column_names = feature_names + ['subject']
node_df = pd.read_csv(os.path.join('./cora', 'cora.content'),
sep='\t', header=None, names=column_names)
node_index = [i for i in G.nodes]
node_df = node_df.reindex(index=node_index)
feature_matrix = np.array(node_df.iloc[:, 0:-1])
feature_matrix = row_normalization(feature_matrix)
labels = node_df['subject']
label_encoder = LabelEncoder()
labels = label_encoder.fit_transform(labels)
return G, feature_matrix, labels
def row_normalization(matrix):
'''
Normalize feature matrix.
'''
norm = np.linalg.norm(matrix, axis=1, keepdims=True)
matrix = matrix / norm
return matrix
def get_mask(labels_per_class, num_valid, num_test, num_nodes, num_classes, labels):
'''
Return mask index for semi-supervised training.
'''
all_index = np.arange(num_nodes)
train_index = []
cnt = [0 for _ in range(num_classes)]
for i, label in enumerate(labels):
if cnt[label] < labels_per_class:
train_index.append(i)
cnt[label] += 1
elif len(train_index) == num_classes * labels_per_class:
break
all_index = np.delete(all_index, train_index)
valid_test_index = np.random.choice(
all_index, num_valid + num_test, replace=False)
valid_index = valid_test_index[:num_valid]
test_index = valid_test_index[num_valid: num_valid + num_test]
return np.array(train_index), valid_index, test_index
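# Illustrative call (Cora has 7 classes and 2708 nodes; the split sizes
# below are the conventional ones, assumed here for the example):
#   train_idx, valid_idx, test_idx = get_mask(20, 500, 1000, len(labels), 7, labels)
# yields 140 stratified training indices plus disjoint random
# validation/test index sets drawn from the remaining nodes.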
def METHOD_NAME(G):
"""
Normalize adjacency matrix.
"""
    A = nx.adjacency_matrix(G).todense().astype(float)  # np.float was removed in NumPy 1.24
    A_tilda = A + np.eye(A.shape[0]).astype(float)  # A + I
degree = np.array(np.sum(A_tilda, axis=0))[0]
D_hat_inv_sqrt = np.diag(np.power(degree, -0.5))
A_hat = np.matmul(D_hat_inv_sqrt, np.matmul(
A_tilda, D_hat_inv_sqrt)) # D^(-0.5) * A * D^(-0.5)
return A_hat
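# Worked example: for the 2-node path graph, A = [[0, 1], [1, 0]], so
# A_tilda = [[1, 1], [1, 1]] and both degrees are 2; hence
# A_hat = D^(-1/2) (A + I) D^(-1/2) = [[0.5, 0.5], [0.5, 0.5]],
# the symmetric renormalization used by GCNs (Kipf & Welling, 2017).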
def get_accuracy(predict, label, mask):
'''
    Calculate accuracy.
'''
mask = np.squeeze(mask)
predict = np.argmax(predict[mask], axis=1)
label = np.squeeze(label[mask])
correct = np.count_nonzero(predict == label)
accuracy = (correct / len(mask))
return accuracy | null |
828 | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
from test_framework.messages import (
COIN,
tx_from_hex,
)
class TxnMallTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.supports_cli = False
def METHOD_NAME(self):
self.skip_if_no_wallet()
def add_options(self, parser):
parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
parser.add_argument("--segwit", dest="segwit", default=False, action="store_true",
help="Test behaviour with SegWit txn (which should fail)")
def setup_network(self):
# Start with split network:
super().setup_network()
self.disconnect_nodes(1, 2)
def run_test(self):
if self.options.segwit:
output_type = "p2sh-segwit"
else:
output_type = "legacy"
# All nodes should start with 1,250 BTC:
starting_balance = 1250
for i in range(3):
assert_equal(self.nodes[i].getbalance()['bitcoin'], starting_balance)
self.nodes[0].settxfee(.001)
node0_address1 = self.nodes[0].getnewaddress(address_type=output_type)
node0_txid1 = self.nodes[0].sendtoaddress(node0_address1, 1219)
node0_tx1 = self.nodes[0].gettransaction(node0_txid1)
node0_address2 = self.nodes[0].getnewaddress(address_type=output_type)
node0_txid2 = self.nodes[0].sendtoaddress(node0_address2, 29)
node0_tx2 = self.nodes[0].gettransaction(node0_txid2)
assert_equal(self.nodes[0].getbalance()['bitcoin'],
starting_balance + node0_tx1["fee"]['bitcoin'] + node0_tx2["fee"]['bitcoin'])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress()
# Send tx1, and another transaction tx2 that won't be cloned
txid1 = self.nodes[0].sendtoaddress(node1_address, 40)
txid2 = self.nodes[0].sendtoaddress(node1_address, 20)
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1, 1)
clone_inputs = [{"txid": rawtx1["vin"][0]["txid"], "vout": rawtx1["vin"][0]["vout"], "sequence": rawtx1["vin"][0]["sequence"]}]
clone_outputs = [
{rawtx1["vout"][0]["scriptPubKey"]["address"]: rawtx1["vout"][0]["value"]},
{rawtx1["vout"][1]["scriptPubKey"]["address"]: rawtx1["vout"][1]["value"]},
]
assert_equal(rawtx1["vout"][2]["scriptPubKey"]["type"], "fee")
clone_outputs.append({"fee": rawtx1["vout"][2]["value"]})
clone_locktime = rawtx1["locktime"]
clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime)
# createrawtransaction randomizes the order of its outputs, so swap them if necessary.
clone_tx = tx_from_hex(clone_raw)
if (rawtx1["vout"][0]["value"] == 40 and clone_tx.vout[0].nValue.getAmount() != 40*COIN or rawtx1["vout"][0]["value"] != 40 and clone_tx.vout[0].nValue.getAmount() == 40*COIN):
(clone_tx.vout[0], clone_tx.vout[1]) = (clone_tx.vout[1], clone_tx.vout[0])
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
tx1_clone = self.nodes[0].signrawtransactionwithwallet(clone_tx.serialize().hex(), None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
if (self.options.mine_block):
self.nodes[0].generate(1)
self.sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 50 BTC for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + node0_tx1["fee"]['bitcoin'] + node0_tx2["fee"]['bitcoin']
if self.options.mine_block:
expected += 50
expected += tx1["amount"]['bitcoin'] + tx1["fee"]['bitcoin']
expected += tx2["amount"]['bitcoin'] + tx2["fee"]['bitcoin']
assert_equal(self.nodes[0].getbalance()['bitcoin'], expected)
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Send clone and its parent to miner
self.nodes[2].sendrawtransaction(node0_tx1["hex"])
txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
if self.options.segwit:
assert_equal(txid1, txid1_clone)
return
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
self.connect_nodes(1, 2)
self.nodes[2].sendrawtransaction(node0_tx2["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
self.sync_blocks()
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
# Check node0's total balance; should be same as before the clone, + 100 BTC for 2 matured,
# less possible orphaned matured subsidy
expected += 100
if (self.options.mine_block):
expected -= 50
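            # (the block mined before the split is orphaned by the reorg,
            # so its 50 BTC coinbase subsidy never matures)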
assert_equal(self.nodes[0].getbalance()['bitcoin'], expected)
if __name__ == '__main__':
TxnMallTest().main() | null |
829 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class AssociatePhysicalConnectionToVirtualBorderRouterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'AssociatePhysicalConnectionToVirtualBorderRouter','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_CircuitCode(self): # String
return self.get_query_params().get('CircuitCode')
def set_CircuitCode(self, CircuitCode): # String
self.add_query_param('CircuitCode', CircuitCode)
def get_VlanId(self): # String
return self.get_query_params().get('VlanId')
def set_VlanId(self, VlanId): # String
self.add_query_param('VlanId', VlanId)
def METHOD_NAME(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_EnableIpv6(self): # String
return self.get_query_params().get('EnableIpv6')
def set_EnableIpv6(self, EnableIpv6): # String
self.add_query_param('EnableIpv6', EnableIpv6)
def get_VbrId(self): # String
return self.get_query_params().get('VbrId')
def set_VbrId(self, VbrId): # String
self.add_query_param('VbrId', VbrId)
def get_PeerGatewayIp(self): # String
return self.get_query_params().get('PeerGatewayIp')
def set_PeerGatewayIp(self, PeerGatewayIp): # String
self.add_query_param('PeerGatewayIp', PeerGatewayIp)
def get_PeerIpv6GatewayIp(self): # String
return self.get_query_params().get('PeerIpv6GatewayIp')
def set_PeerIpv6GatewayIp(self, PeerIpv6GatewayIp): # String
self.add_query_param('PeerIpv6GatewayIp', PeerIpv6GatewayIp)
def get_PeeringSubnetMask(self): # String
return self.get_query_params().get('PeeringSubnetMask')
def set_PeeringSubnetMask(self, PeeringSubnetMask): # String
self.add_query_param('PeeringSubnetMask', PeeringSubnetMask)
def get_LocalGatewayIp(self): # String
return self.get_query_params().get('LocalGatewayIp')
def set_LocalGatewayIp(self, LocalGatewayIp): # String
self.add_query_param('LocalGatewayIp', LocalGatewayIp)
def get_PeeringIpv6SubnetMask(self): # String
return self.get_query_params().get('PeeringIpv6SubnetMask')
def set_PeeringIpv6SubnetMask(self, PeeringIpv6SubnetMask): # String
self.add_query_param('PeeringIpv6SubnetMask', PeeringIpv6SubnetMask)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_PhysicalConnectionId(self): # String
return self.get_query_params().get('PhysicalConnectionId')
def set_PhysicalConnectionId(self, PhysicalConnectionId): # String
self.add_query_param('PhysicalConnectionId', PhysicalConnectionId)
def get_LocalIpv6GatewayIp(self): # String
return self.get_query_params().get('LocalIpv6GatewayIp')
def set_LocalIpv6GatewayIp(self, LocalIpv6GatewayIp): # String
self.add_query_param('LocalIpv6GatewayIp', LocalIpv6GatewayIp) | null |
830 | # -*- coding: utf-8 -*-
"""Test CLR interface support."""
import Python.Test as Test
import pytest
from .utils import DictProxyType
def test_interface_standard_attrs():
"""Test standard class attributes."""
from Python.Test import IPublicInterface
assert IPublicInterface.__name__ == 'IPublicInterface'
assert IPublicInterface.__module__ == 'Python.Test'
assert isinstance(IPublicInterface.__dict__, DictProxyType)
def test_global_interface_visibility():
"""Test visibility of module-level interfaces."""
from Python.Test import IPublicInterface
assert IPublicInterface.__name__ == 'IPublicInterface'
with pytest.raises(ImportError):
from Python.Test import IInternalInterface
_ = IInternalInterface
with pytest.raises(AttributeError):
_ = Test.IInternalInterface
def test_nested_interface_visibility():
"""Test visibility of nested interfaces."""
from Python.Test import InterfaceTest
ob = InterfaceTest.IPublic
assert ob.__name__ == 'IPublic'
ob = InterfaceTest.IProtected
assert ob.__name__ == 'IProtected'
with pytest.raises(AttributeError):
_ = InterfaceTest.IInternal
with pytest.raises(AttributeError):
_ = InterfaceTest.IPrivate
def test_explicit_cast_to_interface():
"""Test explicit cast to an interface."""
from Python.Test import InterfaceTest
ob = InterfaceTest()
assert type(ob).__name__ == 'InterfaceTest'
assert hasattr(ob, 'HelloProperty')
i1 = Test.ISayHello1(ob)
assert type(i1).__name__ == 'ISayHello1'
assert hasattr(i1, 'SayHello')
assert i1.SayHello() == 'hello 1'
assert not hasattr(i1, 'HelloProperty')
assert i1.__implementation__ == ob
assert i1.__raw_implementation__ == ob
i2 = Test.ISayHello2(ob)
assert type(i2).__name__ == 'ISayHello2'
assert i2.SayHello() == 'hello 2'
assert hasattr(i2, 'SayHello')
assert not hasattr(i2, 'HelloProperty')
def test_interface_object_returned_through_method():
"""Test interface type is used if method return type is interface"""
from Python.Test import InterfaceTest
ob = InterfaceTest()
hello1 = ob.GetISayHello1()
assert type(hello1).__name__ == 'ISayHello1'
assert hello1.__implementation__.__class__.__name__ == "InterfaceTest"
assert hello1.SayHello() == 'hello 1'
def test_interface_object_returned_through_out_param():
"""Test interface type is used for out parameters of interface types"""
from Python.Test import InterfaceTest
ob = InterfaceTest()
hello2 = ob.GetISayHello2(None)
assert type(hello2).__name__ == 'ISayHello2'
assert hello2.SayHello() == 'hello 2'
def test_interface_out_param_python_impl():
from Python.Test import IOutArg, OutArgCaller
class MyOutImpl(IOutArg):
__namespace__ = "Python.Test"
def MyMethod_Out(self, name, index):
other_index = 101
return ('MyName', other_index)
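    # pythonnet surfaces C# `out` parameters through the Python return
    # value: the implementation returns a tuple and the runtime unpacks
    # the out argument from it (here, the 101 checked below).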
py_impl = MyOutImpl()
assert 101 == OutArgCaller.CallMyMethod_Out(py_impl)
def test_null_interface_object_returned():
"""Test None is used also for methods with interface return types"""
from Python.Test import InterfaceTest
ob = InterfaceTest()
hello1, hello2 = ob.GetNoSayHello(None)
assert hello1 is None
assert hello2 is None
def test_interface_array_returned():
"""Test interface type used for methods returning interface arrays"""
from Python.Test import InterfaceTest
ob = InterfaceTest()
hellos = ob.GetISayHello1Array()
assert type(hellos[0]).__name__ == 'ISayHello1'
assert hellos[0].__implementation__.__class__.__name__ == "InterfaceTest"
def METHOD_NAME():
"""Test the __implementation__ and __raw_implementation__ properties"""
import System
clrVal = System.Int32(100)
i = System.IComparable(clrVal)
assert 100 == i.__implementation__
assert clrVal == i.__raw_implementation__
assert i.__implementation__ != i.__raw_implementation__
def test_interface_collection_iteration():
"""Test interface type is used when iterating over interface collection"""
import System
from System.Collections.Generic import List
elem = System.IComparable(System.Int32(100))
typed_list = List[System.IComparable]()
typed_list.Add(elem)
for e in typed_list:
assert type(e).__name__ == "IComparable"
untyped_list = System.Collections.ArrayList()
untyped_list.Add(elem)
for e in untyped_list:
assert type(e).__name__ == "int"
def test_methods_of_Object_are_available():
"""Test calling methods inherited from Object"""
import System
clrVal = System.Int32(100)
i = System.IComparable(clrVal)
assert i.Equals(clrVal)
assert clrVal.GetHashCode() == i.GetHashCode()
assert clrVal.GetType() == i.GetType()
assert clrVal.ToString() == i.ToString() | null |
831 | import io
from ..media_type_registration import serialization_registry
from ..utils import ensure_awaitable, modules_available
from .container import walk
from .table import (
APACHE_ARROW_FILE_MIME_TYPE,
XLSX_MIME_TYPE,
serialize_arrow,
serialize_csv,
serialize_excel,
serialize_html,
serialize_parquet,
)
async def as_dataset(node):
import xarray
data_vars = {}
coords = {}
if hasattr(node, "items_range"):
items = await node.items_range(0, None)
else:
items = node.items()
for key, array_adapter in items:
spec_names = set(spec.name for spec in array_adapter.specs)
arr = await ensure_awaitable(array_adapter.read)
if "xarray_data_var" in spec_names:
data_vars[key] = (array_adapter.structure().dims, arr)
elif "xarray_coord" in spec_names:
coords[key] = (array_adapter.structure().dims, arr)
else:
raise ValueError(
"Child nodes of xarray_dataset should include spec "
"'xarray_coord' or 'xarray_data_var'."
)
return xarray.Dataset(
data_vars=data_vars, coords=coords, attrs=node.metadata()["attrs"]
)
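# Illustrative result (child names hypothetical): a node whose children are
# tagged 'xarray_data_var' / 'xarray_coord' maps onto, e.g.,
#   xarray.Dataset(data_vars={'image': (('y', 'x'), arr)},
#                  coords={'y': (('y',), ys), 'x': (('x',), xs)},
#                  attrs=node.metadata()['attrs'])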
class _BytesIOThatIgnoresClose(io.BytesIO):
def close(self):
# When the netcdf writer tells us to close(), ignore it.
pass
if modules_available("scipy"):
# Both application/netcdf and application/x-netcdf are used.
# https://en.wikipedia.org/wiki/NetCDF
@serialization_registry.register(
"xarray_dataset", ["application/netcdf", "application/x-netcdf"]
)
async def serialize_netcdf(node, metadata, filter_for_access):
file = _BytesIOThatIgnoresClose()
# Per the xarray.Dataset.to_netcdf documentation,
# file-like objects are only supported by the scipy engine.
(await as_dataset(node)).to_netcdf(file, engine="scipy")
return file.getbuffer()
# Support DataFrame formats by first converting to DataFrame.
# This doesn't make much sense for N-dimensional variables, but for
# 1-dimensional variables it is useful.
@serialization_registry.register("xarray_dataset", APACHE_ARROW_FILE_MIME_TYPE)
async def serialize_dataset_arrow(node, metadata, filter_for_access):
return serialize_arrow((await as_dataset(node)).to_dataframe(), metadata)
@serialization_registry.register("xarray_dataset", "application/x-parquet")
async def serialize_dataset_parquet(node, metadata, filter_for_access):
return serialize_parquet((await as_dataset(node)).to_dataframe(), metadata)
@serialization_registry.register(
"xarray_dataset", ["text/csv", "text/comma-separated-values", "text/plain"]
)
async def serialize_dataset_csv(node, metadata, filter_for_access):
return serialize_csv((await as_dataset(node)).to_dataframe(), metadata)
@serialization_registry.register("xarray_dataset", "text/html")
async def serialize_dataset_html(node, metadata, filter_for_access):
return serialize_html((await as_dataset(node)).to_dataframe(), metadata)
@serialization_registry.register("xarray_dataset", XLSX_MIME_TYPE)
async def METHOD_NAME(node, metadata, filter_for_access):
return serialize_excel((await as_dataset(node)).to_dataframe(), metadata)
if modules_available("orjson"):
import orjson
@serialization_registry.register("xarray_dataset", "application/json")
async def serialize_json(node, metadata, filter_for_access):
df = (await as_dataset(node)).to_dataframe()
return orjson.dumps(
{column: df[column].tolist() for column in df},
)
if modules_available("h5py"):
@serialization_registry.register("xarray_dataset", "application/x-hdf5")
async def serialize_hdf5(node, metadata, filter_for_access):
"""
        Like the container serializer, but encode everything stored under the 'attrs' metadata key as HDF5 attributes.
"""
import h5py
buffer = io.BytesIO()
root_node = node
with h5py.File(buffer, mode="w") as file:
for k, v in metadata["attrs"].items():
file.attrs.create(k, v)
async for key_path, array_adapter in walk(node, filter_for_access):
group = file
node = root_node
for key in key_path[:-1]:
node = node[key]
if key in group:
group = group[key]
else:
group = group.create_group(key)
group.attrs.update(node.metadata()["attrs"])
data = array_adapter.read()
dataset = group.create_dataset(key_path[-1], data=data)
for k, v in array_adapter.metadata()["attrs"].items():
dataset.attrs.create(k, v)
return buffer.getbuffer() | null |
832 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class CreateSslVpnServerRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'CreateSslVpnServer','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_LocalSubnet(self): # String
return self.get_query_params().get('LocalSubnet')
def set_LocalSubnet(self, LocalSubnet): # String
self.add_query_param('LocalSubnet', LocalSubnet)
def get_IDaaSRegionId(self): # String
return self.get_query_params().get('IDaaSRegionId')
def set_IDaaSRegionId(self, IDaaSRegionId): # String
self.add_query_param('IDaaSRegionId', IDaaSRegionId)
def get_EnableMultiFactorAuth(self): # Boolean
return self.get_query_params().get('EnableMultiFactorAuth')
def METHOD_NAME(self, EnableMultiFactorAuth): # Boolean
self.add_query_param('EnableMultiFactorAuth', EnableMultiFactorAuth)
def get_IDaaSInstanceId(self): # String
return self.get_query_params().get('IDaaSInstanceId')
def set_IDaaSInstanceId(self, IDaaSInstanceId): # String
self.add_query_param('IDaaSInstanceId', IDaaSInstanceId)
def get_Cipher(self): # String
return self.get_query_params().get('Cipher')
def set_Cipher(self, Cipher): # String
self.add_query_param('Cipher', Cipher)
def get_ClientIpPool(self): # String
return self.get_query_params().get('ClientIpPool')
def set_ClientIpPool(self, ClientIpPool): # String
self.add_query_param('ClientIpPool', ClientIpPool)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_Compress(self): # Boolean
return self.get_query_params().get('Compress')
def set_Compress(self, Compress): # Boolean
self.add_query_param('Compress', Compress)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_VpnGatewayId(self): # String
return self.get_query_params().get('VpnGatewayId')
def set_VpnGatewayId(self, VpnGatewayId): # String
self.add_query_param('VpnGatewayId', VpnGatewayId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_Port(self): # Integer
return self.get_query_params().get('Port')
def set_Port(self, Port): # Integer
self.add_query_param('Port', Port)
def get_Proto(self): # String
return self.get_query_params().get('Proto')
def set_Proto(self, Proto): # String
self.add_query_param('Proto', Proto)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name) | null |
833 | # ***************************************************************************
# *
# * Authors: Amaya Jimenez ([email protected])
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# * 02111-1307 USA
# *
# * All comments concerning this program package may be sent to the
# * e-mail address '[email protected]'
# ***************************************************************************/
from pyworkflow.tests import BaseTest, setupTestProject, DataSet
from pyworkflow.plugin import Domain
from pwem.protocols import ProtImportMicrographs, ProtSubSet
from xmipp3.protocols.protocol_extract_particles import *
from xmipp3.protocols.protocol_cl2d import *
from xmipp3.protocols.protocol_center_particles import *
ProtCTFFind = Domain.importFromPlugin('cistem.protocols', 'CistemProtCTFFind',
doRaise=True)
EmanProtAutopick = Domain.importFromPlugin('eman2.protocols',
'EmanProtAutopick',
doRaise=True)
# Number of mics to be processed
NUM_MICS = 5
class TestCenterParticles(BaseTest):
@classmethod
def setUpClass(cls):
setupTestProject(cls)
cls.dsRelion = DataSet.getDataSet('relion_tutorial')
def importMicrographs(self):
prot = self.newProtocol(ProtImportMicrographs,
filesPath=self.dsRelion.getFile('micrographs'),
filesPattern='*.mrc',
samplingRateMode=1,
magnification=79096,
scannedPixelSize=56, voltage=300,
sphericalAberration=2.0)
self.launchProtocol(prot)
return prot
def subsetMics(self, inputMics):
protSubset = ProtSubSet()
protSubset.inputFullSet.set(inputMics)
protSubset.chooseAtRandom.set(True)
protSubset.nElements.set(NUM_MICS)
self.launchProtocol(protSubset)
return protSubset
def calculateCtf(self, inputMics):
protCTF = ProtCTFFind()
protCTF.inputMicrographs.set(inputMics)
# Gone in new version: protCTF.ctfDownFactor.set(1.0)
protCTF.lowRes.set(44)
protCTF.highRes.set(15)
self.launchProtocol(protCTF)
return protCTF
def runPicking(self, inputMicrographs):
""" Run a particle picking. """
protPicking = EmanProtAutopick(boxSize=64,
numberOfThreads=1,
numberOfMpi=1,
boxerMode=3,
gaussLow=0.001)
protPicking.inputMicrographs.set(inputMicrographs)
self.launchProtocol(protPicking)
return protPicking
def runExtractParticles(self, inputCoord, setCtfs):
protExtract = self.newProtocol(XmippProtExtractParticles,
boxSize=64,
doInvert = True,
doFlip = False)
protExtract.inputCoordinates.set(inputCoord)
protExtract.ctfRelations.set(setCtfs)
self.launchProtocol(protExtract)
return protExtract
def runClassify(self, inputParts):
numClasses = int(inputParts.getSize()/1000)
if numClasses<=2:
numClasses=4
protClassify = self.newProtocol(XmippProtCL2D,
numberOfIterations = 2,
numberOfClasses=numClasses,
numberOfInitialClasses=numClasses)
protClassify.inputParticles.set(inputParts)
self.launchProtocol(protClassify)
return protClassify, numClasses
def runRealign(self, inputClasses, inputMics):
protRealing = self.newProtocol(XmippProtCenterParticles)
protRealing.inputClasses.set(inputClasses)
protRealing.inputMics.set(inputMics)
self.launchProtocol(protRealing)
return protRealing
def METHOD_NAME(self):
protImportMics = self.importMicrographs()
if protImportMics.isFailed():
self.assertTrue(False)
if NUM_MICS<20:
protSubsetMics = self.subsetMics(protImportMics.outputMicrographs)
if protSubsetMics.isFailed():
self.assertTrue(False)
outMics = protSubsetMics.outputMicrographs
protCtf = self.calculateCtf(outMics)
if protCtf.isFailed():
self.assertTrue(False)
protPicking = self.runPicking(outMics)
if protPicking.isFailed():
self.assertTrue(False)
            protExtract = self.runExtractParticles(
                protPicking.outputCoordinates, protCtf.outputCTF)
if protExtract.isFailed():
self.assertTrue(False)
protClassify, numClasses = self.runClassify(protExtract.outputParticles)
if protClassify.isFailed():
self.assertTrue(False)
if not protClassify.hasAttribute('outputClasses'):
self.assertTrue(False)
if protClassify.outputClasses.getSize() != numClasses:
self.assertTrue(False)
protRealing = self.runRealign(protClassify.outputClasses,
outMics)
if protRealing.isFailed():
self.assertTrue(False)
if not protRealing.hasAttribute('outputClasses') or not \
protRealing.hasAttribute('outputParticles'):
self.assertTrue(False)
if protRealing.outputClasses.getSize() != numClasses:
self.assertTrue(False)
| null |
834 | __author__ = "Aleksandr Slepchenkov"
__email__ = "[email protected]"
from typing import (
Any,
Dict,
Iterable,
List,
Match,
Optional,
Pattern,
Sequence,
Tuple,
Type,
)
Tokens = List[Dict[str, Any]]
# There are too many levels of optional unions of lists of text in cell and align at lines 385 and 396 in mistune
def escape(text: str, quote: bool = ..., smart_amp: bool = ...) -> str: ...
class BlockGrammar:
def_links: Pattern[str]
def_footnotes: Pattern[str]
newline: Pattern[str]
block_code: Pattern[str]
fences: Pattern[str]
hrule: Pattern[str]
heading: Pattern[str]
lheading: Pattern[str]
block_quote: Pattern[str]
list_block: Pattern[str]
list_item: Pattern[str]
list_bullet: Pattern[str]
paragraph: Pattern[str]
block_html: Pattern[str]
table: Pattern[str]
nptable: Pattern[str]
text: Pattern[str]
class BlockLexer:
grammar_class: Type[BlockGrammar]
default_rules: List[str]
list_rules: Tuple[str]
footnote_rules: Tuple[str]
tokens: Tokens
def_links: Dict[str, Dict[str, str]]
def_footnotes: Dict[str, int]
rules = ... # type: BlockGrammar
def __init__(self, rules: Optional[BlockGrammar] = ..., **kwargs: Any) -> None: ...
def __call__(self, text: str, rules: Optional[Sequence[str]] = ...) -> Tokens: ...
def parse(self, text: str, rules: Optional[Sequence[str]] = ...) -> Tokens: ...
def parse_newline(self, m: Match[str]) -> None: ...
def parse_block_code(self, m: Match[str]) -> None: ...
def parse_fences(self, m: Match[str]) -> None: ...
def parse_heading(self, m: Match[str]) -> None: ...
def parse_lheading(self, m: Match[str]) -> None: ...
def parse_hrule(self, m: Match[str]) -> None: ...
def parse_list_block(self, m: Match[str]) -> None: ...
def parse_block_quote(self, m: Match[str]) -> None: ...
def parse_def_links(self, m: Match[str]) -> None: ...
def parse_def_footnotes(self, m: Match[str]) -> None: ...
def parse_table(self, m: Match[str]) -> None: ...
def parse_nptable(self, m: Match[str]) -> None: ...
def parse_block_html(self, m: Match[str]) -> None: ...
def parse_paragraph(self, m: Match[str]) -> None: ...
def parse_text(self, m: Match[str]) -> None: ...
class InlineGrammar:
escape: Pattern[str]
inline_html: Pattern[str]
autolink: Pattern[str]
link: Pattern[str]
reflink: Pattern[str]
nolink: Pattern[str]
url: Pattern[str]
double_emphasis: Pattern[str]
emphasis: Pattern[str]
code: Pattern[str]
linebreak: Pattern[str]
METHOD_NAME: Pattern[str]
footnote: Pattern[str]
text: Pattern[str]
def hard_wrap(self) -> None: ...
class InlineLexer:
grammar_class: Type[InlineGrammar]
default_rules: List[str]
inline_html_rules: List[str]
renderer: Renderer
links: Dict[str, Dict[str, str]]
footnotes: Dict[str, int]
footnote_index: int
_in_link: bool
_in_footnote: bool
_parse_inline_html: bool
rules: InlineGrammar
def __init__(
self, renderer: Renderer, rules: Optional[InlineGrammar] = ..., **kwargs: Any
) -> None: ...
def __call__(self, text: str, rules: Optional[Sequence[str]] = ...) -> str: ...
def setup(
self,
links: Optional[Dict[str, Dict[str, str]]],
footnotes: Optional[Dict[str, int]],
) -> None: ...
line_match: Match[str]
line_started: bool
def output(self, text: str, rules: Optional[Sequence[str]] = ...) -> str: ...
def output_escape(self, m: Match[str]) -> str: ...
def output_autolink(self, m: Match[str]) -> str: ...
def output_url(self, m: Match[str]) -> str: ...
def output_inline_html(self, m: Match[str]) -> str: ...
def output_footnote(self, m: Match[str]) -> Optional[str]: ...
def output_link(self, m: Match[str]) -> str: ...
def output_reflink(self, m: Match[str]) -> Optional[str]: ...
def output_nolink(self, m: Match[str]) -> Optional[str]: ...
def output_double_emphasis(self, m: Match[str]) -> str: ...
def output_emphasis(self, m: Match[str]) -> str: ...
def output_code(self, m: Match[str]) -> str: ...
def output_linebreak(self, m: Match[str]) -> str: ...
def output_strikethrough(self, m: Match[str]) -> str: ...
def output_text(self, m: Match[str]) -> str: ...
class Renderer:
options: Dict[str, str]
def __init__(self, **kwargs: Any) -> None: ...
def placeholder(self) -> str: ...
def block_code(
self, code: str, lang: Any = ...
) -> str: ... # It seems that lang should be string, however other types are valid as well
def block_quote(self, text: str) -> str: ...
def block_html(self, html: str) -> str: ...
def header(self, text: str, level: int, raw: Optional[str] = ...) -> str: ...
def hrule(self) -> str: ...
def list(
self, body: Any, ordered: bool = ...
) -> str: ... # body - same reason as for lang above, and for other Any in this class
def list_item(self, text: Any) -> str: ...
def paragraph(self, text: str) -> str: ...
def table(self, header: Any, body: Any) -> str: ...
def table_row(self, content: Any) -> str: ...
def table_cell(self, content: Any, **flags: Dict[str, Any]) -> str: ...
def double_emphasis(self, text: Any) -> str: ...
def emphasis(self, text: Any) -> str: ...
def codespan(self, text: str) -> str: ...
def linebreak(self) -> str: ...
def METHOD_NAME(self, text: Any) -> str: ...
def text(self, text: Any) -> str: ...
def escape(self, text: Any) -> str: ...
def autolink(self, link: Any, is_email: bool = ...) -> str: ...
def link(self, link: Any, title: Any, text: Any) -> str: ...
def image(self, src: Any, title: Any, text: Any) -> str: ...
def inline_html(self, html: Any) -> str: ...
def newline(self) -> str: ...
def footnote_ref(self, key: Any, index: int) -> str: ...
def footnote_item(self, key: Any, text: str) -> str: ...
def footnotes(self, text: Any) -> str: ...
class Markdown:
renderer = ... # type: Renderer
inline = ... # type: InlineLexer
block = ... # type: BlockLexer
footnotes = ... # type: List[Dict[str, Any]]
tokens = ... # type: Tokens
def __init__(
self,
renderer: Optional[Renderer] = ...,
inline: Optional[InlineLexer] = ...,
block: Optional[BlockLexer] = ...,
**kwargs: Any,
) -> None: ...
def __call__(self, text: str) -> str: ...
def render(self, text: str) -> str: ...
def parse(self, text: str) -> str: ...
token = ... # type: Dict[str, Any]
def pop(self) -> Optional[Dict[str, Any]]: ...
def peek(self) -> Optional[Dict[str, Any]]: ...
def output(self, text: str, rules: Optional[Sequence[str]] = ...) -> str: ...
def tok(self) -> str: ...
def tok_text(self) -> str: ...
def output_newline(self) -> str: ...
def output_hrule(self) -> str: ...
def output_heading(self) -> str: ...
def output_code(self) -> str: ...
def output_table(self) -> str: ...
def output_block_quote(self) -> str: ...
def output_list(self) -> str: ...
def output_list_item(self) -> str: ...
def output_loose_item(self) -> str: ...
def output_footnote(self) -> str: ...
def output_close_html(self) -> str: ...
def output_open_html(self) -> str: ...
def output_paragraph(self) -> str: ...
def output_text(self) -> str: ...
def markdown(text: str, escape: bool = ..., **kwargs: Any) -> str: ... | null |
835 | import asyncio
from typing import TYPE_CHECKING, Dict, Optional
import pandas as pd
from hummingbot.client.config.config_helpers import ClientConfigAdapter
from hummingbot.client.config.security import Security
from hummingbot.client.settings import AllConnectorSettings
from hummingbot.client.ui.interface_utils import format_df_for_printout
from hummingbot.connector.connector_status import get_connector_status
from hummingbot.core.utils.async_utils import safe_ensure_future
from hummingbot.user.user_balances import UserBalances
if TYPE_CHECKING:
from hummingbot.client.hummingbot_application import HummingbotApplication # noqa: F401
OPTIONS = {cs.name for cs in AllConnectorSettings.get_connector_settings().values()
if not cs.use_ethereum_wallet and not cs.uses_gateway_generic_connector() if cs.name != "probit_kr"}
class ConnectCommand:
def connect(self, # type: HummingbotApplication
option: str):
if option is None:
safe_ensure_future(self.show_connections())
else:
safe_ensure_future(self.METHOD_NAME(option))
async def METHOD_NAME(self, # type: HummingbotApplication
connector_name):
# instruct users to use gateway connect if connector is a gateway connector
if AllConnectorSettings.get_connector_settings()[connector_name].uses_gateway_generic_connector():
self.notify("This is a gateway connector. Use `gateway connect` command instead.")
return
self.app.clear_input()
self.placeholder_mode = True
self.app.hide_input = True
if connector_name == "kraken":
self.notify("Reminder: Please ensure your Kraken API Key Nonce Window is at least 10.")
connector_config = ClientConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name))
if Security.connector_config_file_exists(connector_name):
await Security.wait_til_decryption_done()
api_key_config = [
c.printable_value for c in connector_config.traverse(secure=False) if "api_key" in c.attr
]
if api_key_config:
api_key = api_key_config[0]
prompt = (
f"Would you like to replace your existing {connector_name} API key {api_key} (Yes/No)? >>> "
)
else:
prompt = f"Would you like to replace your existing {connector_name} key (Yes/No)? >>> "
answer = await self.app.prompt(prompt=prompt)
if self.app.to_stop_config:
self.app.to_stop_config = False
return
if answer.lower() in ("yes", "y"):
previous_keys = Security.api_keys(connector_name)
await self._perform_connect(connector_config, previous_keys)
else:
await self._perform_connect(connector_config)
self.placeholder_mode = False
self.app.hide_input = False
self.app.change_prompt(prompt=">>> ")
async def show_connections(self # type: HummingbotApplication
):
self.notify("\nTesting connections, please wait...")
df, failed_msgs = await self.connection_df()
lines = [" " + line for line in format_df_for_printout(
df,
table_format=self.client_config_map.tables_format).split("\n")]
if failed_msgs:
lines.append("\nFailed connections:")
lines.extend([" " + k + ": " + v for k, v in failed_msgs.items()])
self.notify("\n".join(lines))
async def connection_df(self # type: HummingbotApplication
):
await Security.wait_til_decryption_done()
columns = ["Exchange", " Keys Added", " Keys Confirmed", " Tier"]
data = []
failed_msgs = {}
network_timeout = float(self.client_config_map.commands_timeout.other_commands_timeout)
try:
err_msgs = await asyncio.wait_for(
UserBalances.instance().update_exchanges(self.client_config_map, reconnect=True), network_timeout
)
except asyncio.TimeoutError:
self.notify("\nA network error prevented the connection table to populate. See logs for more details.")
raise
for option in sorted(OPTIONS):
keys_added = "No"
keys_confirmed = "No"
status = get_connector_status(option)
api_keys = (
Security.api_keys(option).values()
if not UserBalances.instance().is_gateway_market(option)
else {}
)
if len(api_keys) > 0:
keys_added = "Yes"
err_msg = err_msgs.get(option)
if err_msg is not None:
failed_msgs[option] = err_msg
else:
keys_confirmed = "Yes"
data.append([option, keys_added, keys_confirmed, status])
return pd.DataFrame(data=data, columns=columns), failed_msgs
async def validate_n_connect_connector(
self, # type: HummingbotApplication
connector_name: str,
) -> Optional[str]:
await Security.wait_til_decryption_done()
api_keys = Security.api_keys(connector_name)
network_timeout = float(self.client_config_map.commands_timeout.other_commands_timeout)
try:
err_msg = await asyncio.wait_for(
UserBalances.instance().add_exchange(connector_name, self.client_config_map, **api_keys),
network_timeout,
)
except asyncio.TimeoutError:
self.notify(
"\nA network error prevented the connection from completing. See logs for more details.")
self.placeholder_mode = False
self.app.hide_input = False
self.app.change_prompt(prompt=">>> ")
raise
return err_msg
async def _perform_connect(self, connector_config: ClientConfigAdapter, previous_keys: Optional[Dict] = None):
connector_name = connector_config.connector
original_config = connector_config.full_copy()
await self.prompt_for_model_config(connector_config)
self.app.change_prompt(prompt=">>> ")
if self.app.to_stop_config:
self.app.to_stop_config = False
return
Security.update_secure_config(connector_config)
err_msg = await self.validate_n_connect_connector(connector_name)
if err_msg is None:
self.notify(f"\nYou are now connected to {connector_name}.")
else:
self.notify(f"\nError: {err_msg}")
if previous_keys is not None:
Security.update_secure_config(original_config) | null |
836 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class CreateFlowlogRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'CreateFlowlog')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_CenId(self): # String
return self.get_query_params().get('CenId')
def set_CenId(self, CenId): # String
self.add_query_param('CenId', CenId)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_ProjectName(self): # String
return self.get_query_params().get('ProjectName')
def set_ProjectName(self, ProjectName): # String
self.add_query_param('ProjectName', ProjectName)
def get_LogStoreName(self): # String
return self.get_query_params().get('LogStoreName')
def set_LogStoreName(self, LogStoreName): # String
self.add_query_param('LogStoreName', LogStoreName)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def METHOD_NAME(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_TransitRouterAttachmentId(self): # String
return self.get_query_params().get('TransitRouterAttachmentId')
def set_TransitRouterAttachmentId(self, TransitRouterAttachmentId): # String
self.add_query_param('TransitRouterAttachmentId', TransitRouterAttachmentId)
def get_Interval(self): # Long
return self.get_query_params().get('Interval')
def set_Interval(self, Interval): # Long
self.add_query_param('Interval', Interval)
def get_FlowLogName(self): # String
return self.get_query_params().get('FlowLogName')
def set_FlowLogName(self, FlowLogName): # String
self.add_query_param('FlowLogName', FlowLogName) | null |
837 | import urllib
import os
import webbrowser
from minigalaxy.api import Api
from minigalaxy.paths import UI_DIR, THUMBNAIL_DIR, COVER_DIR
from minigalaxy.translation import _
from minigalaxy.config import Config
from minigalaxy.download import Download
from minigalaxy.download_manager import DownloadManager
from minigalaxy.ui.gtk import Gtk, GLib, Gio, GdkPixbuf
@Gtk.Template.from_file(os.path.join(UI_DIR, "information.ui"))
class Information(Gtk.Dialog):
__gtype_name__ = "Information"
gogBaseUrl = "https://www.gog.com"
image = Gtk.Template.Child()
button_information_ok = Gtk.Template.Child()
button_information_support = Gtk.Template.Child()
button_information_store = Gtk.Template.Child()
button_information_forum = Gtk.Template.Child()
button_information_gog_database = Gtk.Template.Child()
button_information_pcgamingwiki = Gtk.Template.Child()
label_game_description = Gtk.Template.Child()
def __init__(self, parent, game, config: Config, api: Api, download_manager: DownloadManager):
Gtk.Dialog.__init__(self, title=_("Information about {}").format(game.name), parent=parent.parent.parent,
modal=True)
self.parent = parent
self.game = game
self.config = config
self.api = api
self.download_manager = download_manager
self.gamesdb_info = self.api.get_gamesdb_info(self.game)
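# gamesdb_info carries the cover URL plus per-language summary and genre data,
# consumed by load_thumbnail() and load_description() below.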
# Show the image
self.load_thumbnail()
self.load_description()
# Center information window
self.set_position(Gtk.WindowPosition.CENTER_ALWAYS)
@Gtk.Template.Callback("on_button_information_ok_clicked")
def ok_pressed(self, button):
self.destroy()
@Gtk.Template.Callback("on_button_information_support_clicked")
def on_menu_button_support(self, widget):
try:
webbrowser.open(self.api.get_info(self.game)['links']['support'], new=2)
except webbrowser.Error:
self.parent.parent.show_error(
_("Couldn't open support page"),
_("Please check your internet connection")
)
@Gtk.Template.Callback("on_button_information_store_clicked")
def on_menu_button_store(self, widget):
try:
webbrowser.open(self.gogBaseUrl + self.game.url)
except webbrowser.Error:
self.parent.parent.show_error(
_("Couldn't open store page"),
_("Please check your internet connection")
)
@Gtk.Template.Callback("on_button_information_forum_clicked")
def on_menu_button_forum(self, widget):
try:
webbrowser.open(self.api.get_info(self.game)['links']['forum'], new=2)
except webbrowser.Error:
self.parent.parent.show_error(
_("Couldn't open forum page"),
_("Please check your internet connection")
)
@Gtk.Template.Callback("on_button_information_gog_database_clicked")
def METHOD_NAME(self, widget):
try:
webbrowser.open("https://www.gogdb.org/product/{}".format(self.game.id))
except webbrowser.Error:
self.parent.parent.show_error(
_("Couldn't open GOG Database page"),
_("Please check your internet connection")
)
@Gtk.Template.Callback("on_button_information_pcgamingwiki_clicked")
def on_menu_button_pcgamingwiki(self, widget):
try:
webbrowser.open("https://pcgamingwiki.com/api/gog.php?page={}".format(self.game.id))
except webbrowser.Error:
self.parent.parent.show_error(
_("Couldn't open PCGamingWiki page"),
_("Please check your internet connection")
)
def load_thumbnail(self):
if self.gamesdb_info["cover"]:
cover_path = os.path.join(COVER_DIR, "{}.jpg".format(self.game.id))
if os.path.isfile(cover_path):
pixbuf = GdkPixbuf.Pixbuf.new_from_file(cover_path)
pixbuf = pixbuf.scale_simple(340, 480, GdkPixbuf.InterpType.BILINEAR)
GLib.idle_add(self.image.set_from_pixbuf, pixbuf)
else:
url = "{}".format(self.gamesdb_info["cover"])
download = Download(url, cover_path)
self.download_manager.download_now(download)
response = urllib.request.urlopen(url)
input_stream = Gio.MemoryInputStream.new_from_data(response.read(), None)
pixbuf = GdkPixbuf.Pixbuf.new_from_stream(input_stream, None)
pixbuf = pixbuf.scale_simple(340, 480, GdkPixbuf.InterpType.BILINEAR)
GLib.idle_add(self.image.set_from_pixbuf, pixbuf)
else:
thumbnail_path = os.path.join(THUMBNAIL_DIR, "{}.jpg".format(self.game.id))
if not os.path.isfile(thumbnail_path) and self.game.is_installed():
thumbnail_path = os.path.join(self.game.install_dir, "thumbnail.jpg")
GLib.idle_add(self.image.set_from_file, thumbnail_path)
def load_description(self):
description = ""
lang = self.config.lang
if self.gamesdb_info["summary"]:
desc_lang = "*"
for summary_key in self.gamesdb_info["summary"].keys():
if lang in summary_key:
desc_lang = summary_key
description_len = 470
if len(self.gamesdb_info["summary"][desc_lang]) > description_len:
description = "{}...".format(self.gamesdb_info["summary"][desc_lang][:description_len])
else:
description = self.gamesdb_info["summary"][desc_lang]
if "*" in self.gamesdb_info["genre"]:
genre = self.gamesdb_info["genre"]["*"]
else:
genre = _("unknown")
for genre_key, genre_value in self.gamesdb_info["genre"].items():
if lang in genre_key:
genre = genre_value
description = "{}: {}\n{}".format(_("Genre"), genre, description)
if self.game.is_installed():
description = "{}: {}\n{}".format(_("Version"), self.game.get_info("version"), description)
GLib.idle_add(self.label_game_description.set_text, description) | null |
838 | """Launches a containerized kernel."""
import argparse
import os
import sys
import urllib3
from docker.client import DockerClient
from docker.types import EndpointSpec, RestartPolicy
urllib3.disable_warnings()
# Set REMOVE_CONTAINER (or EG_REMOVE_CONTAINER) to False if the container should be
# left around for debug purposes, etc.
remove_container = (
os.getenv("REMOVE_CONTAINER", os.getenv("EG_REMOVE_CONTAINER", "True")).lower() == "true"
)
swarm_mode = os.getenv("DOCKER_MODE", os.getenv("EG_DOCKER_MODE", "swarm")).lower() == "swarm"
def METHOD_NAME(
kernel_id, port_range, response_addr, public_key, spark_context_init_mode, kernel_class_name
):
"""Launches a containerized kernel."""
# Can't proceed if no image was specified.
image_name = os.environ.get("KERNEL_IMAGE", None)
if image_name is None:
sys.exit("ERROR - KERNEL_IMAGE not found in environment - kernel launch terminating!")
# Container name is composed of KERNEL_USERNAME and KERNEL_ID
container_name = os.environ.get("KERNEL_USERNAME", "") + "-" + kernel_id
# Determine network. If EG_DOCKER_NETWORK has not been propagated, fall back to 'bridge'...
docker_network = os.environ.get("DOCKER_NETWORK", os.environ.get("EG_DOCKER_NETWORK", "bridge"))
# Build labels - these will be modelled similar to kubernetes: kernel_id, component, app, ...
labels = {}
labels["kernel_id"] = kernel_id
labels["component"] = "kernel"
labels["app"] = "enterprise-gateway"
# Capture env parameters...
param_env = {}
param_env["PORT_RANGE"] = port_range
param_env["PUBLIC_KEY"] = public_key
param_env["RESPONSE_ADDRESS"] = response_addr
param_env["KERNEL_SPARK_CONTEXT_INIT_MODE"] = spark_context_init_mode
if kernel_class_name:
param_env["KERNEL_CLASS_NAME"] = kernel_class_name
# Since the environment is specific to the kernel (per env stanza of kernelspec, KERNEL_ and EG_CLIENT_ENVS)
# just add the env here.
param_env.update(os.environ)
# Let the image's own PATH be used; pop with a default so a missing PATH can't raise.
param_env.pop("PATH", None)
user = param_env.get("KERNEL_UID")
group = param_env.get("KERNEL_GID")
# setup common args
kwargs = {}
kwargs["name"] = container_name
kwargs["hostname"] = container_name
kwargs["user"] = user
kwargs["labels"] = labels
client = DockerClient.from_env()
if swarm_mode:
networks = []
networks.append(docker_network)
# mounts = list() # Enable if necessary
# mounts.append("/usr/local/share/jupyter/kernels:/usr/local/share/jupyter/kernels:ro")
endpoint_spec = EndpointSpec(mode="dnsrr")
restart_policy = RestartPolicy(condition="none")
# finish args setup
kwargs["env"] = param_env
kwargs["endpoint_spec"] = endpoint_spec
kwargs["restart_policy"] = restart_policy
kwargs["container_labels"] = labels
kwargs["networks"] = networks
kwargs["groups"] = [group, "100"]
if param_env.get("KERNEL_WORKING_DIR"):
kwargs["workdir"] = param_env.get("KERNEL_WORKING_DIR")
# kwargs['mounts'] = mounts # Enable if necessary
# print("service args: {}".format(kwargs)) # useful for debug
client.services.create(image_name, **kwargs)
else:
# volumes = { # Enable if necessary
# "/usr/local/share/jupyter/kernels": {
# "bind": "/usr/local/share/jupyter/kernels",
# "mode": "ro",
# }
# }
# finish args setup
kwargs["environment"] = param_env
kwargs["remove"] = remove_container
kwargs["network"] = docker_network
kwargs["group_add"] = [group, "100"]
kwargs["detach"] = True
if param_env.get("KERNEL_WORKING_DIR"):
kwargs["working_dir"] = param_env.get("KERNEL_WORKING_DIR")
# kwargs['volumes'] = volumes # Enable if necessary
# print("container args: {}".format(kwargs)) # useful for debug
client.containers.run(image_name, **kwargs)
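# A hedged usage sketch (hypothetical values; normally Enterprise Gateway invokes this
# script itself). Something along these lines can be used for manual debugging:
#   KERNEL_IMAGE=elyra/kernel-py:dev \
#   python launch_docker.py --kernel-id 123e4567 --response-address 10.0.0.1:8877 \
#       --public-key <base64-key> --spark-context-initialization-mode none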
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--kernel-id",
dest="kernel_id",
nargs="?",
help="Indicates the id associated with the launched kernel.",
)
parser.add_argument(
"--port-range",
dest="port_range",
nargs="?",
metavar="<lowerPort>..<upperPort>",
help="Port range to impose for kernel ports",
)
parser.add_argument(
"--response-address",
dest="response_address",
nargs="?",
metavar="<ip>:<port>",
help="Connection address (<ip>:<port>) for returning connection file",
)
parser.add_argument(
"--public-key",
dest="public_key",
nargs="?",
help="Public key used to encrypt connection information",
)
parser.add_argument(
"--spark-context-initialization-mode",
dest="spark_context_init_mode",
nargs="?",
help="Indicates whether or how a spark context should be created",
)
parser.add_argument(
"--kernel-class-name",
dest="kernel_class_name",
nargs="?",
help="Indicates the name of the kernel class to use. Must be a subclass of 'ipykernel.kernelbase.Kernel'.",
)
# The following arguments are deprecated and will be used only if their mirroring arguments have no value.
# This means that the default value for --spark-context-initialization-mode (none) will need to come from
# the mirrored args' default until deprecated item has been removed.
parser.add_argument(
"--RemoteProcessProxy.kernel-id",
dest="rpp_kernel_id",
nargs="?",
help="Indicates the id associated with the launched kernel. (deprecated)",
)
parser.add_argument(
"--RemoteProcessProxy.port-range",
dest="rpp_port_range",
nargs="?",
metavar="<lowerPort>..<upperPort>",
help="Port range to impose for kernel ports (deprecated)",
)
parser.add_argument(
"--RemoteProcessProxy.response-address",
dest="rpp_response_address",
nargs="?",
metavar="<ip>:<port>",
help="Connection address (<ip>:<port>) for returning connection file (deprecated)",
)
parser.add_argument(
"--RemoteProcessProxy.public-key",
dest="rpp_public_key",
nargs="?",
help="Public key used to encrypt connection information (deprecated)",
)
parser.add_argument(
"--RemoteProcessProxy.spark-context-initialization-mode",
dest="rpp_spark_context_init_mode",
nargs="?",
help="Indicates whether or how a spark context should be created (deprecated)",
default="none",
)
arguments = vars(parser.parse_args())
kernel_id = arguments["kernel_id"] or arguments["rpp_kernel_id"]
port_range = arguments["port_range"] or arguments["rpp_port_range"]
response_addr = arguments["response_address"] or arguments["rpp_response_address"]
public_key = arguments["public_key"] or arguments["rpp_public_key"]
spark_context_init_mode = (
arguments["spark_context_init_mode"] or arguments["rpp_spark_context_init_mode"]
)
kernel_class_name = arguments["kernel_class_name"]
METHOD_NAME(
kernel_id, port_range, response_addr, public_key, spark_context_init_mode, kernel_class_name
) | null |
839 | #!/usr/bin/env python3
# Copyright 2020 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import collections
import csv
import os
import sys
import chart_util as util
class Parser(util.Parser):
def __init__(self, ngraphs, dependence, nodes, system, machine, resource, threshold, x_axis, show_metg, csv_dialect):
self.ngraphs = ngraphs
self.dependence = dependence.replace('_', ' ')
self.nodes = nodes
self.system = system
self.machine = machine
self.resource = resource
self.threshold = threshold
self.x_axis = x_axis
self.show_metg = show_metg
self.csv_dialect = csv_dialect
self.header = []
self.table = collections.defaultdict(lambda: collections.defaultdict(lambda: float('inf')))
self.metg = collections.defaultdict(lambda: float('inf'))
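# Row filter: keep only rows matching the requested graph count, dependence type and
# node count, and (when --system is given) the matching system name.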
def METHOD_NAME(self, row):
return row['ngraphs'] == self.ngraphs and row['type'] == self.dependence and row['nodes'] == self.nodes and (not self.system or row['name'] == self.system)
def process(self, row, data, metg=None):
if row['name'] not in self.header:
self.header.append(row['name'])
for values in zip(*list(data.values())):
items = dict(zip(data.keys(), values))
self.table[items[self.x_axis]][row['name']] = min(
items['%s_per_second' % self.resource],
self.table[items[self.x_axis]][row['name']],
key=float)
self.metg[items[self.x_axis]] = min(
util.get_machine_parameters(self.machine, row['processor_kind'], self.resource)['peak_%s' % self.resource] * self.threshold,
self.metg[items[self.x_axis]],
key=float)
def error_value(self):
return {}
def complete(self):
# FIXME: This isn't actually the criteria we'd like to sort on,
# we'd prefer to sort so that the list of names roughly parallels
# the order of the bars in the graph.
self.header.sort()
self.header.insert(0, self.x_axis)
if self.show_metg:
self.header.append('metg')
out = csv.DictWriter(sys.stdout, self.header, dialect=self.csv_dialect)
out.writeheader()
for iterations in sorted(self.table.keys()):
row = self.table[iterations]
row = {k: None if v == float('inf') else v for k, v in row.items()}
row[self.x_axis] = iterations
if self.show_metg:
row['metg'] = self.metg[iterations]
out.writerow(row)
def driver(ngraphs, dependence, nodes, system, machine, resource, threshold, x_axis, show_metg, csv_dialect, verbose):
parser = Parser(ngraphs, dependence, nodes, system, machine, resource, threshold, x_axis, show_metg, csv_dialect)
parser.parse(machine, resource, threshold, False, verbose)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-g', '--ngraphs', type=int, required=True)
parser.add_argument('-d', '--dependence', required=True)
parser.add_argument('-n', '--nodes', type=int, required=True)
parser.add_argument('-s', '--system')
parser.add_argument('-m', '--machine', required=True)
parser.add_argument('-r', '--resource', default='flops')
parser.add_argument('-t', '--threshold', type=float, default=0.5)
parser.add_argument('-x', '--x-axis', default='iterations')
parser.add_argument('--hide-metg', action='store_false', dest='show_metg')
parser.add_argument('--csv-dialect', default='excel-tab')
parser.add_argument('-v', '--verbose', action='store_true')
args = parser.parse_args()
driver(**vars(args)) | null |
840 | #!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test version bits warning system.
Generate chains with block versions that appear to be signalling unknown
soft-forks, and test that warning alerts are generated.
"""
import os
import re
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import msg_block
from test_framework.mininode import P2PInterface, mininode_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import wait_until
VB_PERIOD = 144 # versionbits period length for regtest
VB_THRESHOLD = 108 # versionbits activation threshold for regtest
VB_TOP_BITS = 0x20000000
VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
VB_UNKNOWN_VERSION = VB_TOP_BITS | (1 << VB_UNKNOWN_BIT)
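# i.e. 0x20000000 | (1 << 27) == 0x20000000 | 0x08000000 == 0x28000000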
WARN_UNKNOWN_RULES_MINED = "Unknown block versions being mined! It's possible unknown rules are in effect"
WARN_UNKNOWN_RULES_ACTIVE = "unknown new rules activated (versionbit {})".format(VB_UNKNOWN_BIT)
VB_PATTERN = re.compile("Warning: unknown new rules activated.*versionbit")
class VersionBitsWarningTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self):
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
# Open and close to create zero-length file
with open(self.alert_filename, 'w', encoding='utf8'):
pass
self.extra_args = [["-alertnotify=echo %s >> \"" + self.alert_filename + "\""]]
self.setup_nodes()
def send_blocks_with_version(self, peer, numblocks, version):
"""Send numblocks blocks to peer with version set"""
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount()
block_time = self.nodes[0].getblockheader(tip)["time"] + 1
tip = int(tip, 16)
for _ in range(numblocks):
block = create_block(tip, create_coinbase(height + 1), block_time)
block.nVersion = version
block.solve()
peer.send_message(msg_block(block))
block_time += 1
height += 1
tip = block.sha256
peer.sync_with_ping()
def versionbits_in_alert_file(self):
"""Test that the versionbits warning has been written to the alert file."""
with open(self.alert_filename, 'r', encoding='utf8') as f:
alert_text = f.read()
return VB_PATTERN.search(alert_text) is not None
def METHOD_NAME(self):
node = self.nodes[0]
node.add_p2p_connection(P2PInterface())
node_deterministic_address = node.get_deterministic_priv_key().address
# Mine one period worth of blocks
node.generatetoaddress(VB_PERIOD, node_deterministic_address)
self.log.info("Check that there is no warning if previous VB_BLOCKS have <VB_THRESHOLD blocks with unknown versionbits version.")
# Build one period of blocks with < VB_THRESHOLD blocks signaling some unknown bit
self.send_blocks_with_version(node.p2p, VB_THRESHOLD - 1, VB_UNKNOWN_VERSION)
node.generatetoaddress(VB_PERIOD - VB_THRESHOLD + 1, node_deterministic_address)
# Check that we're not getting any versionbit-related errors in get*info()
assert not VB_PATTERN.match(node.getmininginfo()["warnings"])
assert not VB_PATTERN.match(node.getnetworkinfo()["warnings"])
self.log.info("Check that there is a warning if >50 blocks in the last 100 were an unknown version")
# Build one period of blocks with VB_THRESHOLD blocks signaling some unknown bit
self.send_blocks_with_version(node.p2p, VB_THRESHOLD, VB_UNKNOWN_VERSION)
node.generatetoaddress(VB_PERIOD - VB_THRESHOLD, node_deterministic_address)
# Check that get*info() shows the 51/100 unknown block version error.
assert WARN_UNKNOWN_RULES_MINED in node.getmininginfo()["warnings"]
assert WARN_UNKNOWN_RULES_MINED in node.getnetworkinfo()["warnings"]
self.log.info("Check that there is a warning if previous VB_BLOCKS have >=VB_THRESHOLD blocks with unknown versionbits version.")
# Mine a period worth of expected blocks so the generic block-version warning
# is cleared. This will move the versionbit state to ACTIVE.
node.generatetoaddress(VB_PERIOD, node_deterministic_address)
# Stop-start the node. This is required because bitcoind will only warn once about unknown versions or unknown rules activating.
self.restart_node(0)
# Generating one block guarantees that we'll get out of IBD
node.generatetoaddress(1, node_deterministic_address)
wait_until(lambda: not node.getblockchaininfo()['initialblockdownload'], timeout=10, lock=mininode_lock)
# Generating one more block will be enough to generate an error.
node.generatetoaddress(1, node_deterministic_address)
# Check that get*info() shows the versionbits unknown rules warning
assert WARN_UNKNOWN_RULES_ACTIVE in node.getmininginfo()["warnings"]
assert WARN_UNKNOWN_RULES_ACTIVE in node.getnetworkinfo()["warnings"]
# Check that the alert file shows the versionbits unknown rules warning
wait_until(lambda: self.versionbits_in_alert_file(), timeout=60)
if __name__ == '__main__':
VersionBitsWarningTest().main() | null |
841 | #!/usr/bin/env python
import json
import aiohttp
import asyncio
import logging
import hummingbot.connector.exchange.mexc.mexc_constants as CONSTANTS
import hummingbot.connector.exchange.mexc.mexc_utils as mexc_utils
from typing import Dict, Optional, AsyncIterable, Any, List
from hummingbot.connector.exchange.mexc.mexc_auth import MexcAuth
from hummingbot.core.api_throttler.async_throttler import AsyncThrottler
from hummingbot.logger import HummingbotLogger
class MexcWebSocketAdaptor:
DEAL_CHANNEL_ID = "push.deal"
DEPTH_CHANNEL_ID = "push.depth"
SUBSCRIPTION_LIST = set([DEAL_CHANNEL_ID, DEPTH_CHANNEL_ID])
_ID_FIELD_NAME = "id"
_logger: Optional[HummingbotLogger] = None
MESSAGE_TIMEOUT = 120.0
PING_TIMEOUT = 10.0
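# MESSAGE_TIMEOUT bounds how long iter_messages() waits for any frame before probing
# the connection with a ping; PING_TIMEOUT bounds the wait for the matching pong.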
@classmethod
def logger(cls) -> HummingbotLogger:
if cls._logger is None:
cls._logger = logging.getLogger(__name__)
return cls._logger
def __init__(
self,
throttler: AsyncThrottler,
auth: Optional[MexcAuth] = None,
shared_client: Optional[aiohttp.ClientSession] = None,
):
self._auth: Optional[MexcAuth] = auth
self._is_private = self._auth is not None
self._WS_URL = CONSTANTS.MEXC_WS_URL_PUBLIC
self._shared_client = shared_client
self._websocket: Optional[aiohttp.ClientWebSocketResponse] = None
self._throttler = throttler
def get_shared_client(self) -> aiohttp.ClientSession:
if not self._shared_client:
self._shared_client = aiohttp.ClientSession()
return self._shared_client
async def METHOD_NAME(self, payload: Dict[str, Any]):
await self._websocket.send_json(payload)
async def send_request_str(self, payload: str):
await self._websocket.send_str(payload)
async def subscribe_to_order_book_streams(self, trading_pairs: List[str]):
try:
for trading_pair in trading_pairs:
trading_pair = mexc_utils.convert_to_exchange_trading_pair(trading_pair)
subscribe_deal_request: Dict[str, Any] = {
"op": "sub.deal",
"symbol": trading_pair,
}
async with self._throttler.execute_task(CONSTANTS.MEXC_WS_URL_PUBLIC):
await self.send_request_str(json.dumps(subscribe_deal_request))
subscribe_depth_request: Dict[str, Any] = {
"op": "sub.depth",
"symbol": trading_pair,
}
async with self._throttler.execute_task(CONSTANTS.MEXC_WS_URL_PUBLIC):
await self.send_request_str(json.dumps(subscribe_depth_request))
except asyncio.CancelledError:
raise
except Exception:
self.logger().error(
"Unexpected error occurred subscribing to order book trading and delta streams...", exc_info=True
)
raise
async def subscribe_to_user_streams(self):
pass
async def authenticate(self):
pass
async def connect(self):
try:
self._websocket = await self.get_shared_client().ws_connect(
url=self._WS_URL)
except Exception as e:
self.logger().error(f"Websocket error: '{str(e)}'", exc_info=True)
raise
# disconnect from exchange
async def disconnect(self):
if self._websocket is None:
return
await self._websocket.close()
async def iter_messages(self) -> AsyncIterable[Any]:
try:
while True:
try:
msg = await asyncio.wait_for(self._websocket.receive(), timeout=self.MESSAGE_TIMEOUT)
if msg.type == aiohttp.WSMsgType.CLOSED:
raise ConnectionError
yield json.loads(msg.data)
except asyncio.TimeoutError:
pong_waiter = self._websocket.ping()
self.logger().warning("WebSocket receive_json timeout ...")
await asyncio.wait_for(pong_waiter, timeout=self.PING_TIMEOUT)
except ConnectionError:
return | null |
842 | """
This type stub file was generated by pyright.
"""
from collections import UserDict
from celery.utils.serialization import strtobool
"""Worker remote control command implementations."""
__all__ = ("Panel",)
DEFAULT_TASK_INFO_ITEMS = ...
logger = ...
controller_info_t = ...
def ok(value): ...
def nok(value): ...
class Panel(UserDict):
"""Global registry of remote control commands."""
data = ...
meta = ...
@classmethod
def register(cls, *args, **kwargs): ...
def control_command(**kwargs): ...
def inspect_command(**kwargs): ...
@inspect_command()
def report(state): # -> dict[str, Unknown]:
"""Information about Celery installation for bug reports."""
...
@inspect_command(
alias="dump_conf",
signature="[include_defaults=False]",
args=[("with_defaults", strtobool)],
)
def conf(
state, with_defaults=..., **kwargs
): # -> dict[Unknown, Unknown | Any] | list[Unknown] | dict[Unknown, Unknown] | str:
"""List configuration."""
...
@inspect_command(variadic="ids", signature="[id1 [id2 [... [idN]]]]")
def query_task(
state, ids, **kwargs
): # -> dict[Unknown, tuple[Literal['active', 'reserved', 'ready'], Unknown]]:
"""Query for task information by id."""
...
@control_command(variadic="task_id", signature="[id1 [id2 [... [idN]]]]")
def revoke(state, task_id, terminate=..., signal=..., **kwargs): # -> dict[str, str]:
"""Revoke task by task id (or list of ids).
Keyword Arguments:
terminate (bool): Also terminate the process if the task is active.
signal (str): Name of signal to use for terminate (e.g., ``KILL``).
"""
...
@control_command(
variadic="task_id",
args=[("signal", str)],
signature="<signal> [id1 [id2 [... [idN]]]]",
)
def terminate(state, signal, task_id, **kwargs): # -> dict[str, str]:
"""Terminate task by task id (or list of ids)."""
...
@control_command(
args=[("task_name", str), ("rate_limit", str)],
signature="<task_name> <rate_limit (e.g., 5/s | 5/m | 5/h)>",
)
def rate_limit(state, task_name, rate_limit, **kwargs): # -> dict[str, str]:
"""Tell worker(s) to modify the rate limit for a task by type.
See Also:
:attr:`celery.app.task.Task.rate_limit`.
Arguments:
task_name (str): Type of task to set rate limit for.
rate_limit (int, str): New rate limit.
"""
...
@control_command(
args=[("task_name", str), ("soft", float), ("hard", float)],
signature="<task_name> <soft_secs> [hard_secs]",
)
def time_limit(state, task_name=..., hard=..., soft=..., **kwargs): # -> dict[str, str]:
"""Tell worker(s) to modify the time limit for task by type.
Arguments:
task_name (str): Name of task to change.
hard (float): Hard time limit.
soft (float): Soft time limit.
"""
...
@inspect_command()
def clock(state, **kwargs): # -> dict[str, Unknown]:
"""Get current logical clock value."""
...
@control_command()
def METHOD_NAME(state, id, topic, action=..., **kwargs): # -> None:
"""Hold election.
Arguments:
id (str): Unique election id.
topic (str): Election topic.
action (str): Action to take for elected actor.
"""
...
@control_command()
def enable_events(state): # -> dict[str, str]:
"""Tell worker(s) to send task-related events."""
...
@control_command()
def disable_events(state): # -> dict[str, str]:
"""Tell worker(s) to stop sending task-related events."""
...
@control_command()
def heartbeat(state): # -> None:
"""Tell worker(s) to send event heartbeat immediately."""
...
@inspect_command(visible=False)
def hello(
state, from_node, revoked=..., **kwargs
): # -> dict[str, Unknown | dict[Unknown, Unknown]] | None:
"""Request mingle sync-data."""
...
@inspect_command(default_timeout=0.2)
def ping(state, **kwargs): # -> dict[str, str]:
"""Ping worker(s)."""
...
@inspect_command()
def stats(state, **kwargs):
"""Request worker statistics/information."""
...
@inspect_command(alias="dump_schedule")
def scheduled(
state, **kwargs
): # -> list[dict[str, Unknown | str | dict[str, Unknown | bool | dict[str, Unknown | Any | None] | None] | None]]:
"""List of currently scheduled ETA/countdown tasks."""
...
@inspect_command(alias="dump_reserved")
def reserved(state, **kwargs): # -> list[Unknown]:
"""List of currently reserved tasks, not including scheduled/active."""
...
@inspect_command(alias="dump_active")
def active(state, safe=..., **kwargs): # -> list[Unknown]:
"""List of tasks currently being executed."""
...
@inspect_command(alias="dump_revoked")
def revoked(state, **kwargs): # -> list[Unknown]:
"""List of revoked task-ids."""
...
@inspect_command(
alias="dump_tasks",
variadic="taskinfoitems",
signature="[attr1 [attr2 [... [attrN]]]]",
)
def registered(
state, taskinfoitems=..., builtins=..., **kwargs
): # -> list[str | Unknown]:
"""List of registered tasks.
Arguments:
taskinfoitems (Sequence[str]): List of task attributes to include.
Defaults to ``exchange,routing_key,rate_limit``.
builtins (bool): Also include built-in tasks.
"""
...
@inspect_command(
default_timeout=60,
args=[("type", str), ("num", int), ("max_depth", int)],
signature="[object_type=Request] [num=200 [max_depth=10]]",
)
def objgraph(state, num=..., max_depth=..., type=...): # -> dict[str, str]:
"""Create graph of uncollected objects (memory-leak debugging).
Arguments:
num (int): Max number of objects to graph.
max_depth (int): Traverse at most n levels deep.
type (str): Name of object to graph. Default is ``"Request"``.
"""
...
@inspect_command()
def memsample(state, **kwargs): # -> str | None:
"""Sample current RSS memory usage."""
...
@inspect_command(args=[("samples", int)], signature="[n_samples=10]")
def memdump(state, samples=..., **kwargs): # -> str:
"""Dump statistics of previous memsample requests."""
...
@control_command(args=[("n", int)], signature="[N=1]")
def pool_grow(state, n=..., **kwargs): # -> dict[str, str]:
"""Grow pool by n processes/threads."""
...
@control_command(args=[("n", int)], signature="[N=1]")
def pool_shrink(state, n=..., **kwargs): # -> dict[str, str]:
"""Shrink pool by n processes/threads."""
...
@control_command()
def pool_restart(
state, modules=..., reload=..., reloader=..., **kwargs
): # -> dict[str, str]:
"""Restart execution pool."""
...
@control_command(args=[("max", int), ("min", int)], signature="[max [min]]")
def autoscale(state, max=..., min=...): # -> dict[str, str]:
"""Modify autoscale settings."""
...
@control_command()
def shutdown(state, msg=..., **kwargs):
"""Shutdown worker(s)."""
...
@control_command(
args=[
("queue", str),
("exchange", str),
("exchange_type", str),
("routing_key", str),
],
signature="<queue> [exchange [type [routing_key]]]",
)
def add_consumer(
state, queue, exchange=..., exchange_type=..., routing_key=..., **options
): # -> dict[str, str]:
"""Tell worker(s) to consume from task queue by name."""
...
@control_command(args=[("queue", str)], signature="<queue>")
def cancel_consumer(state, queue, **_): # -> dict[str, str]:
"""Tell worker(s) to stop consuming from task queue by name."""
...
@inspect_command()
def active_queues(state): # -> list[dict[Unknown, Unknown]]:
"""List the task queues a worker is currently consuming from."""
... | null |
843 | # Note: The model and training settings do not follow the reference settings
# from the paper. The settings are chosen such that the example can easily be
# run on a small dataset with a single GPU.
import torch
import torchvision
from torch import nn
from lightly.loss import SwaVLoss
from lightly.loss.memory_bank import MemoryBankModule
from lightly.models.modules import SwaVProjectionHead, SwaVPrototypes
from lightly.transforms.swav_transform import SwaVTransform
class SwaV(nn.Module):
def __init__(self, backbone):
super().__init__()
self.backbone = backbone
self.projection_head = SwaVProjectionHead(512, 512, 128)
self.prototypes = SwaVPrototypes(128, 512, 1)
self.start_queue_at_epoch = 2
self.queues = nn.ModuleList([MemoryBankModule(size=3840) for _ in range(2)])
def forward(self, high_resolution, low_resolution, epoch):
self.prototypes.normalize()
high_resolution_features = [self._subforward(x) for x in high_resolution]
low_resolution_features = [self._subforward(x) for x in low_resolution]
high_resolution_prototypes = [
self.prototypes(x, epoch) for x in high_resolution_features
]
low_resolution_prototypes = [
self.prototypes(x, epoch) for x in low_resolution_features
]
queue_prototypes = self.METHOD_NAME(high_resolution_features, epoch)
return high_resolution_prototypes, low_resolution_prototypes, queue_prototypes
def _subforward(self, input):
features = self.backbone(input).flatten(start_dim=1)
features = self.projection_head(features)
features = nn.functional.normalize(features, dim=1, p=2)
return features
@torch.no_grad()
def METHOD_NAME(self, high_resolution_features, epoch):
if len(high_resolution_features) != len(self.queues):
raise ValueError(
f"The number of queues ({len(self.queues)}) should be equal to the number of high "
f"resolution inputs ({len(high_resolution_features)}). Set `n_queues` accordingly."
)
# Get the queue features
queue_features = []
for i in range(len(self.queues)):
_, features = self.queues[i](high_resolution_features[i], update=True)
# Queue features are in (num_ftrs X queue_length) shape, while the high res
# features are in (batch_size X num_ftrs). Swap the axes for interoperability.
features = torch.permute(features, (1, 0))
queue_features.append(features)
# If loss calculation with queue prototypes starts at a later epoch,
# just queue the features and return None instead of queue prototypes.
if self.start_queue_at_epoch > 0 and epoch < self.start_queue_at_epoch:
return None
# Assign prototypes
queue_prototypes = [self.prototypes(x, epoch) for x in queue_features]
return queue_prototypes
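# A minimal sanity-check sketch (not part of the original example) for the axis swap
# performed in METHOD_NAME above: the memory bank returns (num_ftrs, queue_length),
# and permuting yields the batch-first (queue_length, num_ftrs) layout.
_bank_out = torch.randn(128, 3840)
assert torch.permute(_bank_out, (1, 0)).shape == (3840, 128)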
resnet = torchvision.models.resnet18()
backbone = nn.Sequential(*list(resnet.children())[:-1])
model = SwaV(backbone)
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
transform = SwaVTransform()
# we ignore object detection annotations by setting target_transform to return 0
dataset = torchvision.datasets.VOCDetection(
"datasets/pascal_voc",
download=True,
transform=transform,
target_transform=lambda t: 0,
)
# or create a dataset from a folder containing images or videos:
# dataset = LightlyDataset("path/to/folder")
dataloader = torch.utils.data.DataLoader(
dataset,
batch_size=128,
shuffle=True,
drop_last=True,
num_workers=8,
)
criterion = SwaVLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
print("Starting Training")
for epoch in range(10):
total_loss = 0
for batch in dataloader:
views = batch[0]
views = [view.to(device) for view in views]
high_resolution, low_resolution = views[:2], views[2:]
high_resolution, low_resolution, queue = model(
high_resolution, low_resolution, epoch
)
loss = criterion(high_resolution, low_resolution, queue)
total_loss += loss.detach()
loss.backward()
optimizer.step()
optimizer.zero_grad()
avg_loss = total_loss / len(dataloader)
print(f"epoch: {epoch:>02}, loss: {avg_loss:.5f}") | null |
844 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class DescribeActiveOperationTasksRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'DescribeActiveOperationTasks')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ProductId(self): # String
return self.get_query_params().get('ProductId')
def set_ProductId(self, ProductId): # String
self.add_query_param('ProductId', ProductId)
def get_ChangeLevel(self): # String
return self.get_query_params().get('ChangeLevel')
def set_ChangeLevel(self, ChangeLevel): # String
self.add_query_param('ChangeLevel', ChangeLevel)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_InsName(self): # String
return self.get_query_params().get('InsName')
def METHOD_NAME(self, InsName): # String
self.add_query_param('InsName', InsName)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_TaskType(self): # String
return self.get_query_params().get('TaskType')
def set_TaskType(self, TaskType): # String
self.add_query_param('TaskType', TaskType)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_AllowCancel(self): # Integer
return self.get_query_params().get('AllowCancel')
def set_AllowCancel(self, AllowCancel): # Integer
self.add_query_param('AllowCancel', AllowCancel)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DbType(self): # String
return self.get_query_params().get('DbType')
def set_DbType(self, DbType): # String
self.add_query_param('DbType', DbType)
def get_AllowChange(self): # Integer
return self.get_query_params().get('AllowChange')
def set_AllowChange(self, AllowChange): # Integer
self.add_query_param('AllowChange', AllowChange)
def get_Region(self): # String
return self.get_query_params().get('Region')
def set_Region(self, Region): # String
self.add_query_param('Region', Region)
def get_Status(self): # Integer
return self.get_query_params().get('Status')
def set_Status(self, Status): # Integer
self.add_query_param('Status', Status) | null |
845 | """
SoftLayer.tests.managers.account_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from unittest import mock as mock
from SoftLayer.managers.account import AccountManager as AccountManager
from SoftLayer import SoftLayerAPIError
from SoftLayer import testing
class AccountManagerTests(testing.TestCase):
def set_up(self):
self.manager = AccountManager(self.client)
self.SLNOE = 'SoftLayer_Notification_Occurrence_Event'
def test_get_summary(self):
self.manager.get_summary()
self.assert_called_with('SoftLayer_Account', 'getObject')
def test_get_planned_upcoming_events(self):
self.manager.get_upcoming_events("PLANNED")
self.assert_called_with(self.SLNOE, 'getAllObjects')
def test_get_unplanned_upcoming_events(self):
self.manager.get_upcoming_events("UNPLANNED_INCIDENT")
self.assert_called_with(self.SLNOE, 'getAllObjects')
def test_get_announcement_upcoming_events(self):
self.manager.get_upcoming_events("ANNOUNCEMENT")
self.assert_called_with(self.SLNOE, 'getAllObjects')
def test_add_planned_event_filter(self):
event_type = 'PLANNED'
_filter = {
'notificationOccurrenceEventType': {
'keyName': {
'operation': event_type
}
}
}
self.manager.add_event_filter(_filter, event_type)
def test_add_unplanned_event_filter(self):
event_type = 'UNPLANNED_INCIDENT'
_filter = {
'notificationOccurrenceEventType': {
'keyName': {
'operation': event_type
}
}
}
self.manager.add_event_filter(_filter, event_type)
def test_add_announcement_event_filter(self):
event_type = 'ANNOUNCEMENT'
_filter = {
'notificationOccurrenceEventType': {
'keyName': {
'operation': event_type
}
}
}
self.manager.add_event_filter(_filter, event_type)
def METHOD_NAME(self):
self.manager.ack_event(12345)
self.assert_called_with(self.SLNOE, 'acknowledgeNotification', identifier=12345)
def test_get_event(self):
self.manager.get_event(12345)
self.assert_called_with(self.SLNOE, 'getObject', identifier=12345)
def test_get_invoices(self):
self.manager.get_invoices()
self.assert_called_with('SoftLayer_Account', 'getInvoices')
def test_get_invoices_closed(self):
self.manager.get_invoices(closed=True)
_filter = {
'invoices': {
'createDate': {
'operation': 'orderBy',
'options': [{
'name': 'sort',
'value': ['DESC']
}]
}
}
}
self.assert_called_with('SoftLayer_Account', 'getInvoices', filter=_filter)
def test_get_billing_items(self):
self.manager.get_billing_items(12345)
self.assert_called_with('SoftLayer_Billing_Invoice', 'getInvoiceTopLevelItems')
def test_get_account_billing_items(self):
self.manager.get_account_billing_items()
object_filter = {
"allTopLevelBillingItems": {
"cancellationDate": {
"operation": "is null"
},
"id": {
"operation": "orderBy",
"options": [
{
"name": "sort",
"value": ["ASC"]
}
]
}
}
}
self.assert_called_with('SoftLayer_Account', 'getAllTopLevelBillingItems',
offset=0, limit=100, filter=object_filter)
self.manager.get_account_billing_items(mask="id")
self.assert_called_with('SoftLayer_Account', 'getAllTopLevelBillingItems', mask="mask[id]")
def test_get_billing_item(self):
self.manager.get_billing_item(12345)
self.assert_called_with('SoftLayer_Billing_Item', 'getObject', identifier=12345)
self.manager.get_billing_item(12345, mask="id")
self.assert_called_with('SoftLayer_Billing_Item', 'getObject', identifier=12345, mask="mask[id]")
def test_cancel_item(self):
self.manager.cancel_item(12345)
reason = "No longer needed"
note = "Cancelled by testAccount with the SLCLI"
self.assert_called_with('SoftLayer_Billing_Item', 'cancelItem',
args=(False, True, reason, note), identifier=12345)
reason = "TEST"
note = "note test"
self.manager.cancel_item(12345, reason, note)
self.assert_called_with('SoftLayer_Billing_Item', 'cancelItem',
args=(False, True, reason, note), identifier=12345)
def test_get_billing_item_from_invoice(self):
self.manager.get_billing_item_from_invoice(12345)
self.assert_called_with('SoftLayer_Billing_Invoice_Item', 'getBillingItem', identifier=12345)
def test_get_item_details_with_billing_item_id(self):
self.manager.get_item_detail(12345)
self.assert_called_with('SoftLayer_Billing_Item', 'getObject', identifier=12345)
def test_get_item_details_with_invoice_item_id(self):
mock = self.set_mock('SoftLayer_Billing_Item', 'getObject')
mock.side_effect = SoftLayerAPIError(404, "Unable to find object with id of '123456'.")
self.manager.get_item_detail(123456)
self.assert_called_with('SoftLayer_Billing_Item', 'getObject', identifier=123456)
self.assert_called_with('SoftLayer_Billing_Invoice_Item', 'getBillingItem', identifier=123456)
def test_get_routers(self):
self.manager.get_routers()
self.assert_called_with("SoftLayer_Account", "getRouters")
def test_get_active_account_licenses(self):
self.manager.get_active_account_licenses()
self.assert_called_with("SoftLayer_Account", "getActiveAccountLicenses")
def test_get_active_virtual_licenses(self):
self.manager.get_active_virtual_licenses()
self.assert_called_with("SoftLayer_Account", "getActiveVirtualLicenses")
def test_get_routers_with_datacenter(self):
self.manager.get_routers(location='dal13')
object_filter = {'routers': {'topLevelLocation': {'name': {'operation': 'dal13'}}}}
self.assert_called_with("SoftLayer_Account", "getRouters", filter=object_filter)
def test_get_bandwidth_pools(self):
self.manager.get_bandwidth_pools()
self.assert_called_with('SoftLayer_Account', 'getBandwidthAllotments', mask=mock.ANY)
def test_get_bandwidth_pool_counts(self):
total = self.manager.get_bandwidth_pool_counts(1234)
self.assert_called_with('SoftLayer_Network_Bandwidth_Version1_Allotment', 'getObject', identifier=1234)
self.assertEqual(total, 2)
def test_get_provisioning_scripts(self):
self.manager.get_provisioning_scripts()
self.assert_called_with("SoftLayer_Account", "getPostProvisioningHooks")
def test_create_provisioning_scripts(self):
self.manager.create_provisioning('testslcli', 'http://slclitest.com')
self.assert_called_with('SoftLayer_Provisioning_Hook', 'createObject')
def test_delete_provisioning_scripts(self):
self.manager.delete_provisioning(123456)
self.assert_called_with("SoftLayer_Provisioning_Hook", "deleteObject")
def test_get_upgrades_orders(self):
self.manager.get_account_upgrade_orders()
self.assert_called_with("SoftLayer_Account", "getUpgradeRequests") | null |
846 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkess.endpoint import endpoint_data
class ModifyScheduledTaskRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ess', '2014-08-28', 'ModifyScheduledTask','ess')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ScheduledAction(self): # String
return self.get_query_params().get('ScheduledAction')
def set_ScheduledAction(self, ScheduledAction): # String
self.add_query_param('ScheduledAction', ScheduledAction)
def get_MaxValue(self): # Integer
return self.get_query_params().get('MaxValue')
def set_MaxValue(self, MaxValue): # Integer
self.add_query_param('MaxValue', MaxValue)
def get_ScalingGroupId(self): # String
return self.get_query_params().get('ScalingGroupId')
def set_ScalingGroupId(self, ScalingGroupId): # String
self.add_query_param('ScalingGroupId', ScalingGroupId)
def METHOD_NAME(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_RecurrenceEndTime(self): # String
return self.get_query_params().get('RecurrenceEndTime')
def set_RecurrenceEndTime(self, RecurrenceEndTime): # String
self.add_query_param('RecurrenceEndTime', RecurrenceEndTime)
def get_LaunchTime(self): # String
return self.get_query_params().get('LaunchTime')
def set_LaunchTime(self, LaunchTime): # String
self.add_query_param('LaunchTime', LaunchTime)
def get_DesiredCapacity(self): # Integer
return self.get_query_params().get('DesiredCapacity')
def set_DesiredCapacity(self, DesiredCapacity): # Integer
self.add_query_param('DesiredCapacity', DesiredCapacity)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_RecurrenceValue(self): # String
return self.get_query_params().get('RecurrenceValue')
def set_RecurrenceValue(self, RecurrenceValue): # String
self.add_query_param('RecurrenceValue', RecurrenceValue)
def get_LaunchExpirationTime(self): # Integer
return self.get_query_params().get('LaunchExpirationTime')
def set_LaunchExpirationTime(self, LaunchExpirationTime): # Integer
self.add_query_param('LaunchExpirationTime', LaunchExpirationTime)
def get_MinValue(self): # Integer
return self.get_query_params().get('MinValue')
def set_MinValue(self, MinValue): # Integer
self.add_query_param('MinValue', MinValue)
def get_ScheduledTaskName(self): # String
return self.get_query_params().get('ScheduledTaskName')
def set_ScheduledTaskName(self, ScheduledTaskName): # String
self.add_query_param('ScheduledTaskName', ScheduledTaskName)
def get_TaskEnabled(self): # Boolean
return self.get_query_params().get('TaskEnabled')
def set_TaskEnabled(self, TaskEnabled): # Boolean
self.add_query_param('TaskEnabled', TaskEnabled)
def get_ScheduledTaskId(self): # String
return self.get_query_params().get('ScheduledTaskId')
def set_ScheduledTaskId(self, ScheduledTaskId): # String
self.add_query_param('ScheduledTaskId', ScheduledTaskId)
def get_RecurrenceType(self): # String
return self.get_query_params().get('RecurrenceType')
def set_RecurrenceType(self, RecurrenceType): # String
self.add_query_param('RecurrenceType', RecurrenceType) | null |
847 | # Copyright cocotb contributors
# Licensed under the Revised BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-3-Clause
"""
Tests of cocotb.test functionality
* expect_error
* expect_fail
* timeout
"""
from collections.abc import Coroutine
import pytest
from common import MyBaseException, MyException
import cocotb
from cocotb.triggers import NullTrigger, Timer
@cocotb.test(expect_error=NameError)
async def test_error(dut):
"""Error in the test"""
await Timer(100, "ns")
fail # noqa
@cocotb.test()
async def test_tests_are_tests(dut):
"""
Test that things annotated with cocotb.test are tests
"""
assert isinstance(test_tests_are_tests, cocotb.test)
# just to be sure...
@cocotb.test(expect_fail=True)
async def METHOD_NAME(dut):
assert False
@cocotb.test()
async def test_immediate_test(dut):
"""Test that tests can return immediately"""
return
@cocotb.test(expect_fail=True)
async def test_assertion_is_failure(dut):
assert False
@cocotb.test(expect_error=MyException)
async def test_expect_particular_exception(dut):
raise MyException()
@cocotb.test(expect_error=(MyException, ValueError))
async def test_expect_exception_list(dut):
raise MyException()
@cocotb.test(
expect_error=cocotb.result.SimTimeoutError, timeout_time=1, timeout_unit="ns"
)
async def test_timeout_testdec_fail(dut):
await Timer(10, "ns")
@cocotb.test(timeout_time=100, timeout_unit="ns")
async def test_timeout_testdec_pass(dut):
await Timer(10, "ns")
@cocotb.test(timeout_time=10, timeout_unit="ns")
async def test_timeout_testdec_simultaneous(dut):
try:
await cocotb.triggers.with_timeout(
Timer(1, "ns"), timeout_time=1, timeout_unit="ns"
)
except cocotb.result.SimTimeoutError:
pass
else:
assert False, "Expected a Timeout"
# Whether this test fails or passes depends on the behavior of the
# scheduler, simulator, and the implementation of the timeout function.
# CAUTION: THIS MAY CHANGE
# these tests should run in definition order, not lexicographic order
last_ordered_test = None
@cocotb.test()
async def test_ordering_3(dut):
global last_ordered_test
val, last_ordered_test = last_ordered_test, 3
assert val is None
@cocotb.test()
async def test_ordering_2(dut):
global last_ordered_test
val, last_ordered_test = last_ordered_test, 2
assert val == 3
@cocotb.test()
async def test_ordering_1(dut):
global last_ordered_test
val, last_ordered_test = last_ordered_test, 1
assert val == 2
@cocotb.test()
class TestClass(Coroutine):
def __init__(self, dut):
self._coro = self.run(dut)
async def run(self, dut):
pass
def send(self, value):
self._coro.send(value)
def throw(self, exception):
self._coro.throw(exception)
def __await__(self):
yield from self._coro.__await__()
@cocotb.test()
async def test_empty_docstring(dut) -> None:
""""""
@cocotb.test(expect_fail=True)
async def test_pytest_raises_fail(dut):
with pytest.raises(AssertionError):
assert True
@cocotb.test(expect_fail=True)
async def test_pytest_warns_fail(dut):
def test_func():
pass
with pytest.warns(RuntimeWarning):
test_func()
@cocotb.test(expect_fail=True)
async def test_pytest_deprecated_call_fail(dut):
def test_func():
pass
with pytest.deprecated_call():
test_func()
@cocotb.test(expect_fail=True)
async def test_pytest_raises_fail_in_task(dut):
async def test_func():
with pytest.raises(AssertionError):
assert True
cocotb.start_soon(test_func())
await NullTrigger()
@cocotb.test(expect_fail=True)
async def test_pytest_warns_fail_in_task(dut):
def inner_func():
pass
async def test_func():
with pytest.warns(RuntimeWarning):
inner_func()
cocotb.start_soon(test_func())
await NullTrigger()
@cocotb.test(expect_fail=True)
async def test_pytest_deprecated_call_fail_in_task(dut):
def inner_func():
pass
async def test_func():
with pytest.deprecated_call():
inner_func()
cocotb.start_soon(test_func())
await NullTrigger()
@cocotb.test(expect_error=MyBaseException)
async def test_base_exception_expect_fail(dut):
raise MyBaseException
@cocotb.test(expect_error=MyBaseException)
async def test_base_exception_in_task_expect_fail(dut):
async def test_func():
raise MyBaseException
cocotb.start_soon(test_func())
await NullTrigger()
@cocotb.test
async def test_without_parenthesis(dut):
pass | null |
848 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class DescribeDBInstancesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'DescribeDBInstances')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ConnectionString(self): # String
return self.get_query_params().get('ConnectionString')
def set_ConnectionString(self, ConnectionString): # String
self.add_query_param('ConnectionString', ConnectionString)
def get_EngineVersion(self): # String
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self, EngineVersion): # String
self.add_query_param('EngineVersion', EngineVersion)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_proxyId(self): # String
return self.get_query_params().get('proxyId')
def set_proxyId(self, proxyId): # String
self.add_query_param('proxyId', proxyId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBInstanceType(self): # String
return self.get_query_params().get('DBInstanceType')
def set_DBInstanceType(self, DBInstanceType): # String
self.add_query_param('DBInstanceType', DBInstanceType)
def get_DBInstanceClass(self): # String
return self.get_query_params().get('DBInstanceClass')
def set_DBInstanceClass(self, DBInstanceClass): # String
self.add_query_param('DBInstanceClass', DBInstanceClass)
def get_Tags(self): # String
return self.get_query_params().get('Tags')
def set_Tags(self, Tags): # String
self.add_query_param('Tags', Tags)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_MaxResults(self): # Integer
return self.get_query_params().get('MaxResults')
def set_MaxResults(self, MaxResults): # Integer
self.add_query_param('MaxResults', MaxResults)
def get_InstanceNetworkType(self): # String
return self.get_query_params().get('InstanceNetworkType')
def set_InstanceNetworkType(self, InstanceNetworkType): # String
self.add_query_param('InstanceNetworkType', InstanceNetworkType)
def get_ConnectionMode(self): # String
return self.get_query_params().get('ConnectionMode')
def set_ConnectionMode(self, ConnectionMode): # String
self.add_query_param('ConnectionMode', ConnectionMode)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_InstanceLevel(self): # Integer
return self.get_query_params().get('InstanceLevel')
def set_InstanceLevel(self, InstanceLevel): # Integer
self.add_query_param('InstanceLevel', InstanceLevel)
def get_SearchKey(self): # String
return self.get_query_params().get('SearchKey')
def set_SearchKey(self, SearchKey): # String
self.add_query_param('SearchKey', SearchKey)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_Expired(self): # String
return self.get_query_params().get('Expired')
def set_Expired(self, Expired): # String
self.add_query_param('Expired', Expired)
def get_Engine(self): # String
return self.get_query_params().get('Engine')
def set_Engine(self, Engine): # String
self.add_query_param('Engine', Engine)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_DBInstanceStatus(self): # String
return self.get_query_params().get('DBInstanceStatus')
def set_DBInstanceStatus(self, DBInstanceStatus): # String
self.add_query_param('DBInstanceStatus', DBInstanceStatus)
def METHOD_NAME(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_DedicatedHostGroupId(self): # String
return self.get_query_params().get('DedicatedHostGroupId')
def set_DedicatedHostGroupId(self, DedicatedHostGroupId): # String
self.add_query_param('DedicatedHostGroupId', DedicatedHostGroupId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_DedicatedHostId(self): # String
return self.get_query_params().get('DedicatedHostId')
def set_DedicatedHostId(self, DedicatedHostId): # String
self.add_query_param('DedicatedHostId', DedicatedHostId)
def get_Filter(self): # String
return self.get_query_params().get('Filter')
def set_Filter(self, Filter): # String
self.add_query_param('Filter', Filter)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_Category(self): # String
return self.get_query_params().get('Category')
def set_Category(self, Category): # String
self.add_query_param('Category', Category)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType) | null |
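# Hedged usage sketch (not part of the generated SDK file): issuing this
# request through the core client. Credentials, region, and filter values
# below are placeholders.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient

    client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
    request = DescribeDBInstancesRequest()
    request.set_Engine("MySQL")   # optional engine filter
    request.set_PageSize(30)
    request.set_PageNumber(1)
    # do_action_with_exception returns the raw JSON response body as bytes.
    print(client.do_action_with_exception(request))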
849 | # Copyright (c) ZenML GmbH 2021. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Materializer for Pillow Image objects."""
import os
import tempfile
from typing import TYPE_CHECKING, Any, ClassVar, Dict, Tuple, Type
from PIL import Image
from zenml.enums import ArtifactType, VisualizationType
from zenml.io import fileio
from zenml.logger import get_logger
from zenml.materializers.base_materializer import BaseMaterializer
from zenml.utils import io_utils
if TYPE_CHECKING:
from zenml.metadata.metadata_types import MetadataType
logger = get_logger(__name__)
DEFAULT_IMAGE_FILENAME = "image_file"
DEFAULT_IMAGE_EXTENSION = "PNG"
class PillowImageMaterializer(BaseMaterializer):
"""Materializer for Image.Image objects.
This materializer takes a PIL image object and returns a PIL image object.
It handles all the source image formats supported by PIL as listed here:
https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html.
"""
ASSOCIATED_TYPES: ClassVar[Tuple[Type[Any], ...]] = (Image.Image,)
ASSOCIATED_ARTIFACT_TYPE: ClassVar[ArtifactType] = ArtifactType.DATA
def load(self, data_type: Type[Image.Image]) -> Image.Image:
"""Read from artifact store.
Args:
data_type: An Image.Image type.
Returns:
An Image.Image object.
"""
files = io_utils.find_files(self.uri, f"{DEFAULT_IMAGE_FILENAME}.*")
filepath = [file for file in files if not fileio.isdir(file)][0]
# create a temporary folder
temp_dir = tempfile.TemporaryDirectory(prefix="zenml-temp-")
temp_file = os.path.join(
temp_dir.name,
f"{DEFAULT_IMAGE_FILENAME}{os.path.splitext(filepath)[1]}",
)
# copy from artifact store to temporary file
fileio.copy(filepath, temp_file)
return Image.open(temp_file)
def METHOD_NAME(self, image: Image.Image) -> None:
"""Write to artifact store.
Args:
image: An Image.Image object.
"""
temp_dir = tempfile.TemporaryDirectory(prefix="zenml-temp-")
file_extension = image.format or DEFAULT_IMAGE_EXTENSION
full_filename = f"{DEFAULT_IMAGE_FILENAME}.{file_extension}"
temp_image_path = os.path.join(temp_dir.name, full_filename)
# save the image in a temporary directory
image.METHOD_NAME(temp_image_path)
# copy the saved image to the artifact store
artifact_store_path = os.path.join(self.uri, full_filename)
io_utils.copy(temp_image_path, artifact_store_path, overwrite=True) # type: ignore[attr-defined]
temp_dir.cleanup()
def save_visualizations(
self, image: Image.Image
) -> Dict[str, VisualizationType]:
"""Finds and saves the given image as a visualization.
Args:
image: The image to save as a visualization.
Returns:
A dictionary of visualization URIs and their types.
"""
file_extension = image.format or DEFAULT_IMAGE_EXTENSION
full_filename = f"{DEFAULT_IMAGE_FILENAME}.{file_extension}"
artifact_store_path = os.path.join(self.uri, full_filename)
return {artifact_store_path: VisualizationType.IMAGE}
def extract_metadata(
self, image: Image.Image
) -> Dict[str, "MetadataType"]:
"""Extract metadata from the given `Image` object.
Args:
image: The `Image` object to extract metadata from.
Returns:
The extracted metadata as a dictionary.
"""
metadata = {
"width": image.width,
"height": image.height,
"mode": str(image.mode),
}
if hasattr(image, "filename"):
metadata["original_filename"] = str(image.filename)
return metadata # type: ignore[return-value] | null |
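# Hedged usage sketch (not part of this module): in a ZenML pipeline the
# materializer is resolved automatically for steps that return PIL images.
# The `step` import path varies across ZenML releases, so this is shown as
# an illustrative comment rather than live code.
#
# from zenml import step
#
# @step
# def generate_image() -> Image.Image:
#     # Persisted via PillowImageMaterializer; width/height/mode metadata
#     # and an image visualization are recorded as implemented above.
#     return Image.new("RGB", (64, 64), color="white")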
850 | """ TextSynth API
Implementation provided by Fabrice Bellard:
https://github.com/EleutherAI/lm-evaluation-harness/issues/295
In order to use the API, you must have a valid TextSynth account and
enough credits.
Example usage:
python main.py --model textsynth --model_args engine=gptj_6B --no_cache --tasks piqa
Homepage: https://textsynth.com/index.html
"""
import logging
import os
import requests as _requests
import time
from tqdm import tqdm
from lm_eval.base import BaseLM
logger = logging.getLogger(__name__)
def textsynth_completion(**kwargs):
"""Query TextSynth API for completion.
Retry with back-off until they respond.
"""
backoff_time = 3
while True:
try:
return _requests.post(**kwargs)
except _requests.exceptions.RequestException:
import traceback
traceback.print_exc()
time.sleep(backoff_time)
backoff_time *= 1.5
class TextSynthLM(BaseLM):
def __init__(self, engine, truncate=False):
"""
:param engine: str
TextSynth API engine (e.g. `gptj_6B`)
:param truncate: bool
Truncate input if too long (if False and input is too long, throw error)
"""
super().__init__()
self.engine = engine
self.truncate = truncate
self.api_url = "https://api.textsynth.com"
# Read from environment variable TEXTSYNTH_API_SECRET_KEY
self.api_key = os.environ["TEXTSYNTH_API_SECRET_KEY"]
@property
def METHOD_NAME(self):
# Isn't used because we override loglikelihood, loglikelihood_rolling and greedy_until
raise NotImplementedError()
@property
def max_length(self):
# NOTE: Turn on truncation to avoid errors on long inputs.
return 2048
@property
def max_gen_toks(self):
return 256
@property
def batch_size(self):
# Isn't used because we override loglikelihood, loglikelihood_rolling and greedy_until
raise NotImplementedError()
@property
def device(self):
# Isn't used because we override loglikelihood, loglikelihood_rolling and greedy_until
raise NotImplementedError()
def tok_encode(self, string: str):
# Isn't used because we override loglikelihood, loglikelihood_rolling and greedy_until
raise NotImplementedError()
def tok_decode(self, tokens):
# Isn't used because we override loglikelihood, loglikelihood_rolling and greedy_until
raise NotImplementedError()
def loglikelihood(self, requests):
res = []
for context, continuation in tqdm(requests):
response = textsynth_completion(
url=self.api_url + "/v1/engines/" + self.engine + "/logprob",
headers={"Authorization": "Bearer " + self.api_key},
json={"context": context, "continuation": continuation},
)
resp = response.json()
if "logprob" in resp:
logprob = resp["logprob"]
is_greedy = resp["is_greedy"]
res.append((logprob, is_greedy))
else:
logger.error(
f"The following response does not contain `logprobs`. Got:\n{resp}"
)
assert False
return res
def loglikelihood_rolling(self, requests):
# TODO: The TextSynth API does not support tokenized inputs so we cannot
# manually partition long contexts into smaller rolling windows as
# done for other models derived from `BaseLM`. Override this method
# with a windowing scheme that works for direct string inputs.
raise NotImplementedError(
"`loglikelihood_rolling` is currently not supported due to lack of "
"input tokenization support from TextSynth."
)
def greedy_until(self, requests):
if not requests:
return []
res = []
for request in tqdm(requests):
inp = request[0]
request_args = request[1]
until = request_args["until"]
response = textsynth_completion(
url=self.api_url + "/v1/engines/" + self.engine + "/completions",
headers={"Authorization": "Bearer " + self.api_key},
json={
"prompt": inp,
"max_tokens": self.max_gen_toks,
"top_k": 1,
"stop": until,
},
)
resp = response.json()
if "text" in resp:
s = resp["text"]
res.append(s)
else:
logger.error(
f"The following response does not contain generated `text`. "
"Got:\n{resp}"
)
assert False
return res
def _model_call(self, inps):
# Isn't used because we override _loglikelihood_tokens
raise NotImplementedError()
def _model_generate(self, context, max_length, eos_token_id):
# Isn't used because we override greedy_until
raise NotImplementedError() | null |
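# Hedged usage sketch (not part of the original module): scoring a single
# (context, continuation) pair outside the harness. Requires the
# TEXTSYNTH_API_SECRET_KEY environment variable and consumes API credits.
if __name__ == "__main__":
    lm = TextSynthLM(engine="gptj_6B")
    # Each request is a (context, continuation) pair; the result is a list
    # of (logprob, is_greedy) tuples, as assembled in loglikelihood above.
    print(lm.loglikelihood([("The capital of France is", " Paris")]))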
851 | import numpy as np
from hexrd import constants
from hexrd.utils.decorators import numba_njit_if_available
if constants.USE_NUMBA:
from numba import prange
else:
prange = range
ap_2 = constants.cuA_2
sc = constants.sc
@numba_njit_if_available(cache=True, nogil=True)
def getPyramid(xyz):
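    # Identify which of the six pyramids of the cube (apex at the origin,
    # axes +/-x, +/-y, +/-z) contains the point; cu2ho uses the result to
    # permute coordinates so the pyramid axis becomes z.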
x = xyz[0]
y = xyz[1]
z = xyz[2]
if (np.abs(x) <= z) and (np.abs(y) <= z):
return 1
elif (np.abs(x) <= -z) and (np.abs(y) <= -z):
return 2
elif (np.abs(z) <= x) and (np.abs(y) <= x):
return 3
elif (np.abs(z) <= -x) and (np.abs(y) <= -x):
return 4
elif (np.abs(x) <= y) and (np.abs(z) <= y):
return 5
elif (np.abs(x) <= -y) and (np.abs(z) <= -y):
return 6
@numba_njit_if_available(cache=True, nogil=True)
def METHOD_NAME(cu):
ho = cu2ho(cu)
return ho2ro(ho)
@numba_njit_if_available(cache=True, nogil=True)
def cu2ho(cu):
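    # Cube-to-ball step: map a cubochoric grid point onto the homochoric
    # ball via an equal-volume Lambert-type mapping.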
ma = np.max(np.abs(cu))
assert ma <= ap_2, "point outside cubochoric grid"
pyd = getPyramid(cu)
if pyd == 1 or pyd == 2:
sXYZ = cu
elif pyd == 3 or pyd == 4:
sXYZ = np.array([cu[1], cu[2], cu[0]])
elif pyd == 5 or pyd == 6:
sXYZ = np.array([cu[2], cu[0], cu[1]])
xyz = sXYZ * sc
ma = np.max(np.abs(xyz))
if ma < 1E-8:
return np.array([0.0, 0.0, 0.0])
ma2 = np.max(np.abs(xyz[0:2]))
if ma2 < 1E-8:
LamXYZ = np.array([0.0, 0.0, constants.pref * xyz[2]])
else:
if np.abs(xyz[1]) <= np.abs(xyz[0]):
q = (np.pi/12.0) * xyz[1]/xyz[0]
c = np.cos(q)
s = np.sin(q)
q = constants.prek * xyz[0] / np.sqrt(np.sqrt(2.0)-c)
T1 = (np.sqrt(2.0) * c - 1.0) * q
T2 = np.sqrt(2.0) * s * q
else:
q = (np.pi/12.0) * xyz[0]/xyz[1]
c = np.cos(q)
s = np.sin(q)
q = constants.prek * xyz[1] / np.sqrt(np.sqrt(2.0)-c)
T1 = np.sqrt(2.0) * s * q
T2 = (np.sqrt(2.0) * c - 1.0) * q
c = T1**2 + T2**2
s = np.pi * c / (24.0 * xyz[2]**2)
c = np.sqrt(np.pi) * c / np.sqrt(24.0) / xyz[2]
q = np.sqrt( 1.0 - s )
LamXYZ = np.array([T1 * q, T2 * q, constants.pref * xyz[2] - c])
if pyd == 1 or pyd == 2:
return LamXYZ
elif pyd == 3 or pyd == 4:
return np.array([LamXYZ[2], LamXYZ[0], LamXYZ[1]])
elif pyd == 5 or pyd == 6:
return np.array([LamXYZ[1], LamXYZ[2], LamXYZ[0]])
@numba_njit_if_available(cache=True, nogil=True)
def ho2ro(ho):
ax = ho2ax(ho)
return ax2ro(ax)
@numba_njit_if_available(cache=True, nogil=True)
def ho2ax(ho):
hmag = np.linalg.norm(ho[:])**2
if hmag < 1E-8:
return np.array([0.0, 0.0, 1.0, 0.0])
hm = hmag
hn = ho/np.sqrt(hmag)
s = constants.tfit[0] + constants.tfit[1] * hmag
for ii in range(2, 21):
hm = hm*hmag
s = s + constants.tfit[ii] * hm
s = 2.0 * np.arccos(s)
diff = np.abs(s - np.pi)
if diff < 1E-8:
return np.array([hn[0], hn[1], hn[2], np.pi])
else:
return np.array([hn[0], hn[1], hn[2], s])
@numba_njit_if_available(cache=True, nogil=True)
def ax2ro(ax):
if np.abs(ax[3]) < 1E-8:
return np.array([0.0, 0.0, 1.0, 0.0])
elif np.abs(ax[3] - np.pi) < 1E-8:
return np.array([ax[0], ax[1], ax[2], np.inf])
else:
return np.array([ax[0], ax[1], ax[2], np.tan(ax[3]*0.5)])
@numba_njit_if_available(cache=True, nogil=True)
def ro2qu(ro):
ax = ro2ax(ro)
return ax2qu(ax)
@numba_njit_if_available(cache=True, nogil=True)
def ro2ax(ro):
if np.abs(ro[3]) < 1E-8:
return np.array([0.0, 0.0, 1.0, 0.0])
elif ro[3] == np.inf:
return np.array([ro[0], ro[1], ro[2], np.pi])
else:
ang = 2.0*np.arctan(ro[3])
mag = 1.0/np.linalg.norm(ro[0:3])
return np.array([ro[0]*mag, ro[1]*mag, ro[2]*mag, ang])
@numba_njit_if_available(cache=True, nogil=True)
def ax2qu(ro):
if np.abs(ro[3]) < 1E-8:
return np.array([1.0, 0.0, 0.0, 0.0])
else:
c = np.cos(ro[3]*0.5)
s = np.sin(ro[3]*0.5)
return np.array([c, ro[0]*s, ro[1]*s, ro[2]*s]) | null |
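# Hedged usage sketch (not part of the original module): chaining the
# conversions defined above, cubochoric -> homochoric -> axis-angle ->
# Rodrigues -> quaternion. METHOD_NAME here is the cu -> ro entry point
# defined near the top of this file.
if __name__ == "__main__":
    cu = np.array([0.1, 0.0, 0.05])  # must satisfy max(|cu|) <= ap_2
    ro = METHOD_NAME(cu)             # Rodrigues vector [n_x, n_y, n_z, tan(w/2)]
    qu = ro2qu(ro)                   # unit quaternion [w, x, y, z]
    print(ro, qu)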
852 | from django.db import migrations
guild_invite_whitelist = [
("discord.gg/python", "Python Discord", True),
("discord.gg/4JJdJKb", "RLBot", True),
("discord.gg/djPtTRJ", "Kivy", True),
("discord.gg/QXyegWe", "Pyglet", True),
("discord.gg/9XsucTT", "Panda3D", True),
("discord.gg/AP3rq2k", "PyWeek", True),
("discord.gg/vSPsP9t", "Microsoft Python", True),
("discord.gg/bRCvFy9", "Discord.js Official", True),
("discord.gg/9zT7NHP", "Programming Discussions", True),
("discord.gg/ysd6M4r", "JetBrains Community", True),
("discord.gg/4xJeCgy", "Raspberry Pie", True),
("discord.gg/AStb3kZ", "Ren'Py", True),
("discord.gg/t655QNV", "Python Discord: Emojis 1", True),
("discord.gg/vRZPkqC", "Python Discord: Emojis 2", True),
("discord.gg/jTtgWuy", "Django", True),
("discord.gg/W9BypZF", "STEM", True),
("discord.gg/dpy", "discord.py", True),
("discord.gg/programming", "Programmers Hangout", True),
("discord.gg/qhGUjGD", "SpeakJS", True),
("discord.gg/eTbWSZj", "Functional Programming", True),
("discord.gg/r8yreB6", "PyGame", True),
("discord.gg/5UBnR3P", "Python Atlanta", True),
("discord.gg/ccyrDKv", "C#", True),
]
domain_name_blacklist = [
("pornhub.com", None, False),
("liveleak.com", None, False),
("grabify.link", None, False),
("bmwforum.co", None, False),
("leancoding.co", None, False),
("spottyfly.com", None, False),
("stopify.co", None, False),
("yoütu.be", None, False),
("discörd.com", None, False),
("minecräft.com", None, False),
("freegiftcards.co", None, False),
("disçordapp.com", None, False),
("fortnight.space", None, False),
("fortnitechat.site", None, False),
("joinmy.site", None, False),
("curiouscat.club", None, False),
("catsnthings.fun", None, False),
("yourtube.site", None, False),
("youtubeshort.watch", None, False),
("catsnthing.com", None, False),
("youtubeshort.pro", None, False),
("canadianlumberjacks.online", None, False),
("poweredbydialup.club", None, False),
("poweredbydialup.online", None, False),
("poweredbysecurity.org", None, False),
("poweredbysecurity.online", None, False),
("ssteam.site", None, False),
("steamwalletgift.com", None, False),
("discord.gift", None, False),
("lmgtfy.com", None, False),
]
filter_token_blacklist = [
(r"\bgoo+ks*\b", None, False),
(r"\bky+s+\b", None, False),
(r"\bki+ke+s*\b", None, False),
(r"\bbeaner+s?\b", None, False),
(r"\bcoo+ns*\b", None, False),
(r"\bnig+lets*\b", None, False),
(r"\bslant-eyes*\b", None, False),
(r"\btowe?l-?head+s*\b", None, False),
(r"\bchi*n+k+s*\b", None, False),
(r"\bspick*s*\b", None, False),
(r"\bkill* +(?:yo)?urself+\b", None, False),
(r"\bjew+s*\b", None, False),
(r"\bsuicide\b", None, False),
(r"\brape\b", None, False),
(r"\b(re+)tar+(d+|t+)(ed)?\b", None, False),
(r"\bta+r+d+\b", None, False),
(r"\bcunts*\b", None, False),
(r"\btrann*y\b", None, False),
(r"\bshemale\b", None, False),
(r"fa+g+s*", None, False),
(r"卐", None, False),
(r"卍", None, False),
(r"࿖", None, False),
(r"࿕", None, False),
(r"࿘", None, False),
(r"࿗", None, False),
(r"cuck(?!oo+)", None, False),
(r"nigg+(?:e*r+|a+h*?|u+h+)s?", None, False),
(r"fag+o+t+s*", None, False),
]
file_format_whitelist = [
(".3gp", None, True),
(".3g2", None, True),
(".avi", None, True),
(".bmp", None, True),
(".gif", None, True),
(".h264", None, True),
(".jpg", None, True),
(".jpeg", None, True),
(".m4v", None, True),
(".mkv", None, True),
(".mov", None, True),
(".mp4", None, True),
(".mpeg", None, True),
(".mpg", None, True),
(".png", None, True),
(".tiff", None, True),
(".wmv", None, True),
(".svg", None, True),
(".psd", "Photoshop", True),
(".ai", "Illustrator", True),
(".aep", "After Effects", True),
(".xcf", "GIMP", True),
(".mp3", None, True),
(".wav", None, True),
(".ogg", None, True),
(".webm", None, True),
(".webp", None, True),
]
populate_data = {
"FILTER_TOKEN": filter_token_blacklist,
"DOMAIN_NAME": domain_name_blacklist,
"FILE_FORMAT": file_format_whitelist,
"GUILD_INVITE": guild_invite_whitelist,
}
class Migration(migrations.Migration):
dependencies = [("api", "0058_create_new_filterlist_model")]
def populate_filterlists(app, _):
FilterList = app.get_model("api", "FilterList")
for filterlist_type, metadata in populate_data.items():
for content, comment, allowed in metadata:
FilterList.objects.create(
type=filterlist_type,
allowed=allowed,
content=content,
comment=comment,
)
def METHOD_NAME(app, _):
FilterList = app.get_model("api", "FilterList")
FilterList.objects.all().delete()
operations = [
migrations.RunPython(populate_filterlists, METHOD_NAME)
] | null |
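# Hedged note (not part of the original migration): RunPython pairs the
# forward populate step with the reverse deletion step, so e.g.
#
#   python manage.py migrate api         # forward: filter lists created
#   python manage.py migrate api 0058    # backward: filter lists deleted
#
# where "api" is this app label and 0058 is the dependency listed above.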
853 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbase.endpoint import endpoint_data
class CreateServerlessClusterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'HBase', '2019-01-01', 'CreateServerlessCluster','hbase')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ClientType(self):
return self.get_query_params().get('ClientType')
def set_ClientType(self,ClientType):
self.add_query_param('ClientType',ClientType)
def get_ClusterName(self):
return self.get_query_params().get('ClusterName')
def set_ClusterName(self,ClusterName):
self.add_query_param('ClusterName',ClusterName)
def get_ClientToken(self):
return self.get_query_params().get('ClientToken')
def set_ClientToken(self,ClientToken):
self.add_query_param('ClientToken',ClientToken)
def get_EngineVersion(self):
return self.get_query_params().get('EngineVersion')
def METHOD_NAME(self,EngineVersion):
self.add_query_param('EngineVersion',EngineVersion)
def get_ResourceGroupId(self):
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self,ResourceGroupId):
self.add_query_param('ResourceGroupId',ResourceGroupId)
def get_Engine(self):
return self.get_query_params().get('Engine')
def set_Engine(self,Engine):
self.add_query_param('Engine',Engine)
def get_AutoRenewPeriod(self):
return self.get_query_params().get('AutoRenewPeriod')
def set_AutoRenewPeriod(self,AutoRenewPeriod):
self.add_query_param('AutoRenewPeriod',AutoRenewPeriod)
def get_Period(self):
return self.get_query_params().get('Period')
def set_Period(self,Period):
self.add_query_param('Period',Period)
def get_DiskType(self):
return self.get_query_params().get('DiskType')
def set_DiskType(self,DiskType):
self.add_query_param('DiskType',DiskType)
def get_VSwitchId(self):
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self,VSwitchId):
self.add_query_param('VSwitchId',VSwitchId)
def get_PeriodUnit(self):
return self.get_query_params().get('PeriodUnit')
def set_PeriodUnit(self,PeriodUnit):
self.add_query_param('PeriodUnit',PeriodUnit)
def get_ServerlessCapability(self):
return self.get_query_params().get('ServerlessCapability')
def set_ServerlessCapability(self,ServerlessCapability):
self.add_query_param('ServerlessCapability',ServerlessCapability)
def get_VpcId(self):
return self.get_query_params().get('VpcId')
def set_VpcId(self,VpcId):
self.add_query_param('VpcId',VpcId)
def get_ZoneId(self):
return self.get_query_params().get('ZoneId')
def set_ZoneId(self,ZoneId):
self.add_query_param('ZoneId',ZoneId)
def get_ServerlessStorage(self):
return self.get_query_params().get('ServerlessStorage')
def set_ServerlessStorage(self,ServerlessStorage):
self.add_query_param('ServerlessStorage',ServerlessStorage)
def get_PayType(self):
return self.get_query_params().get('PayType')
def set_PayType(self,PayType):
self.add_query_param('PayType',PayType)
def get_ServerlessSpec(self):
return self.get_query_params().get('ServerlessSpec')
def set_ServerlessSpec(self,ServerlessSpec):
		self.add_query_param('ServerlessSpec',ServerlessSpec)
854 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkunimkt.endpoint import endpoint_data
class ListMediaRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'UniMkt', '2018-12-12', 'ListMedia')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_UserId(self): # String
return self.get_query_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_query_param('UserId', UserId)
def get_OriginSiteUserId(self): # String
return self.get_query_params().get('OriginSiteUserId')
def set_OriginSiteUserId(self, OriginSiteUserId): # String
self.add_query_param('OriginSiteUserId', OriginSiteUserId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_MediaName(self): # String
return self.get_query_params().get('MediaName')
def set_MediaName(self, MediaName): # String
self.add_query_param('MediaName', MediaName)
def get_AppName(self): # String
return self.get_query_params().get('AppName')
def set_AppName(self, AppName): # String
self.add_query_param('AppName', AppName)
def get_TenantId(self): # String
return self.get_query_params().get('TenantId')
def set_TenantId(self, TenantId): # String
self.add_query_param('TenantId', TenantId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_AccessStatus(self): # String
return self.get_query_params().get('AccessStatus')
def set_AccessStatus(self, AccessStatus): # String
self.add_query_param('AccessStatus', AccessStatus)
def get_FirstScene(self): # String
return self.get_query_params().get('FirstScene')
def set_FirstScene(self, FirstScene): # String
self.add_query_param('FirstScene', FirstScene)
def get_EndCreateTime(self): # Long
return self.get_query_params().get('EndCreateTime')
def set_EndCreateTime(self, EndCreateTime): # Long
self.add_query_param('EndCreateTime', EndCreateTime)
def get_Business(self): # String
return self.get_query_params().get('Business')
def set_Business(self, Business): # String
self.add_query_param('Business', Business)
def get_Os(self): # String
return self.get_query_params().get('Os')
def set_Os(self, Os): # String
self.add_query_param('Os', Os)
def get_MediaStatus(self): # String
return self.get_query_params().get('MediaStatus')
def set_MediaStatus(self, MediaStatus): # String
self.add_query_param('MediaStatus', MediaStatus)
def get_Environment(self): # String
return self.get_query_params().get('Environment')
def set_Environment(self, Environment): # String
self.add_query_param('Environment', Environment)
def get_StartCreateTime(self): # Long
return self.get_query_params().get('StartCreateTime')
def METHOD_NAME(self, StartCreateTime): # Long
self.add_query_param('StartCreateTime', StartCreateTime)
def get_UserSite(self): # String
return self.get_query_params().get('UserSite')
def set_UserSite(self, UserSite): # String
self.add_query_param('UserSite', UserSite)
def get_SecondScene(self): # String
return self.get_query_params().get('SecondScene')
def set_SecondScene(self, SecondScene): # String
self.add_query_param('SecondScene', SecondScene)
def get_MediaType(self): # String
return self.get_query_params().get('MediaType')
def set_MediaType(self, MediaType): # String
self.add_query_param('MediaType', MediaType) | null |
855 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkmse.endpoint import endpoint_data
import json
class AddGatewayRouteRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'mse', '2019-05-31', 'AddGatewayRoute','mse')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def METHOD_NAME(self): # String
return self.get_query_params().get('DomainIdListJSON')
def set_DomainIdListJSON(self, DomainIdListJSON): # String
self.add_query_param('DomainIdListJSON', DomainIdListJSON)
def get_DomainId(self): # Long
return self.get_query_params().get('DomainId')
def set_DomainId(self, DomainId): # Long
self.add_query_param('DomainId', DomainId)
def get_RouteType(self): # String
return self.get_query_params().get('RouteType')
def set_RouteType(self, RouteType): # String
self.add_query_param('RouteType', RouteType)
def get_GatewayId(self): # Long
return self.get_query_params().get('GatewayId')
def set_GatewayId(self, GatewayId): # Long
self.add_query_param('GatewayId', GatewayId)
def get_EnableWaf(self): # Boolean
return self.get_query_params().get('EnableWaf')
def set_EnableWaf(self, EnableWaf): # Boolean
self.add_query_param('EnableWaf', EnableWaf)
def get_Predicates(self): # Struct
return self.get_query_params().get('Predicates')
def set_Predicates(self, Predicates): # Struct
self.add_query_param("Predicates", json.dumps(Predicates))
def get_DirectResponseJSON(self): # Struct
return self.get_query_params().get('DirectResponseJSON')
def set_DirectResponseJSON(self, DirectResponseJSON): # Struct
self.add_query_param("DirectResponseJSON", json.dumps(DirectResponseJSON))
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_FallbackServices(self): # Array
return self.get_query_params().get('FallbackServices')
def set_FallbackServices(self, FallbackServices): # Array
self.add_query_param("FallbackServices", json.dumps(FallbackServices))
def get_Fallback(self): # Boolean
return self.get_query_params().get('Fallback')
def set_Fallback(self, Fallback): # Boolean
self.add_query_param('Fallback', Fallback)
def get_GatewayUniqueId(self): # String
return self.get_query_params().get('GatewayUniqueId')
def set_GatewayUniqueId(self, GatewayUniqueId): # String
self.add_query_param('GatewayUniqueId', GatewayUniqueId)
def get_DestinationType(self): # String
return self.get_query_params().get('DestinationType')
def set_DestinationType(self, DestinationType): # String
self.add_query_param('DestinationType', DestinationType)
def get_Policies(self): # String
return self.get_query_params().get('Policies')
def set_Policies(self, Policies): # String
self.add_query_param('Policies', Policies)
def get_RouteOrder(self): # Integer
return self.get_query_params().get('RouteOrder')
def set_RouteOrder(self, RouteOrder): # Integer
self.add_query_param('RouteOrder', RouteOrder)
def get_Services(self): # Array
return self.get_query_params().get('Services')
def set_Services(self, Services): # Array
self.add_query_param("Services", json.dumps(Services))
def get_RedirectJSON(self): # Struct
return self.get_query_params().get('RedirectJSON')
def set_RedirectJSON(self, RedirectJSON): # Struct
self.add_query_param("RedirectJSON", json.dumps(RedirectJSON))
def get_AcceptLanguage(self): # String
return self.get_query_params().get('AcceptLanguage')
def set_AcceptLanguage(self, AcceptLanguage): # String
self.add_query_param('AcceptLanguage', AcceptLanguage) | null |
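# Hedged usage sketch (not part of the generated SDK file): struct-typed
# fields such as Predicates are passed as plain dicts and serialized with
# json.dumps by the setters above. All identifiers and predicate fields
# below are illustrative placeholders.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient

    client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
    request = AddGatewayRouteRequest()
    request.set_GatewayUniqueId("<gateway-unique-id>")
    request.set_Name("demo-route")
    request.set_Predicates({"PathPredicates": {"Path": "/demo", "Type": "PRE"}})
    print(client.do_action_with_exception(request))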
856 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class DescribeRouteEntryListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'DescribeRouteEntryList','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_DestCidrBlockLists(self): # RepeatList
return self.get_query_params().get('DestCidrBlockList')
def set_DestCidrBlockLists(self, DestCidrBlockList): # RepeatList
for depth1 in range(len(DestCidrBlockList)):
self.add_query_param('DestCidrBlockList.' + str(depth1 + 1), DestCidrBlockList[depth1])
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_RouteEntryName(self): # String
return self.get_query_params().get('RouteEntryName')
def set_RouteEntryName(self, RouteEntryName): # String
self.add_query_param('RouteEntryName', RouteEntryName)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_RouteEntryType(self): # String
return self.get_query_params().get('RouteEntryType')
def set_RouteEntryType(self, RouteEntryType): # String
self.add_query_param('RouteEntryType', RouteEntryType)
def get_IpVersion(self): # String
return self.get_query_params().get('IpVersion')
def set_IpVersion(self, IpVersion): # String
self.add_query_param('IpVersion', IpVersion)
def get_NextHopId(self): # String
return self.get_query_params().get('NextHopId')
def set_NextHopId(self, NextHopId): # String
self.add_query_param('NextHopId', NextHopId)
def get_NextHopType(self): # String
return self.get_query_params().get('NextHopType')
def set_NextHopType(self, NextHopType): # String
self.add_query_param('NextHopType', NextHopType)
def get_RouteTableId(self): # String
return self.get_query_params().get('RouteTableId')
def set_RouteTableId(self, RouteTableId): # String
self.add_query_param('RouteTableId', RouteTableId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_DestinationCidrBlock(self): # String
return self.get_query_params().get('DestinationCidrBlock')
def set_DestinationCidrBlock(self, DestinationCidrBlock): # String
self.add_query_param('DestinationCidrBlock', DestinationCidrBlock)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_MaxResult(self): # Integer
return self.get_query_params().get('MaxResult')
def set_MaxResult(self, MaxResult): # Integer
self.add_query_param('MaxResult', MaxResult)
def get_ServiceType(self): # String
return self.get_query_params().get('ServiceType')
def set_ServiceType(self, ServiceType): # String
self.add_query_param('ServiceType', ServiceType)
def METHOD_NAME(self): # String
return self.get_query_params().get('RouteEntryId')
def set_RouteEntryId(self, RouteEntryId): # String
self.add_query_param('RouteEntryId', RouteEntryId) | null |
857 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkadb.endpoint import endpoint_data
class ModifyDBClusterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'adb', '2019-03-15', 'ModifyDBCluster','ads')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ElasticIOResourceSize(self): # String
return self.get_query_params().get('ElasticIOResourceSize')
def set_ElasticIOResourceSize(self, ElasticIOResourceSize): # String
self.add_query_param('ElasticIOResourceSize', ElasticIOResourceSize)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_Mode(self): # String
return self.get_query_params().get('Mode')
def set_Mode(self, Mode): # String
self.add_query_param('Mode', Mode)
def get_StorageResource(self): # String
return self.get_query_params().get('StorageResource')
def set_StorageResource(self, StorageResource): # String
self.add_query_param('StorageResource', StorageResource)
def get_DBNodeClass(self): # String
return self.get_query_params().get('DBNodeClass')
def set_DBNodeClass(self, DBNodeClass): # String
self.add_query_param('DBNodeClass', DBNodeClass)
def get_DBClusterCategory(self): # String
return self.get_query_params().get('DBClusterCategory')
def set_DBClusterCategory(self, DBClusterCategory): # String
self.add_query_param('DBClusterCategory', DBClusterCategory)
def get_DiskPerformanceLevel(self): # String
return self.get_query_params().get('DiskPerformanceLevel')
def set_DiskPerformanceLevel(self, DiskPerformanceLevel): # String
self.add_query_param('DiskPerformanceLevel', DiskPerformanceLevel)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_DBClusterId(self): # String
return self.get_query_params().get('DBClusterId')
def set_DBClusterId(self, DBClusterId): # String
self.add_query_param('DBClusterId', DBClusterId)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def METHOD_NAME(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBNodeGroupCount(self): # String
return self.get_query_params().get('DBNodeGroupCount')
def set_DBNodeGroupCount(self, DBNodeGroupCount): # String
self.add_query_param('DBNodeGroupCount', DBNodeGroupCount)
def get_DBNodeStorage(self): # String
return self.get_query_params().get('DBNodeStorage')
def set_DBNodeStorage(self, DBNodeStorage): # String
self.add_query_param('DBNodeStorage', DBNodeStorage)
def get_ExecutorCount(self): # String
return self.get_query_params().get('ExecutorCount')
def set_ExecutorCount(self, ExecutorCount): # String
self.add_query_param('ExecutorCount', ExecutorCount)
def get_ModifyType(self): # String
return self.get_query_params().get('ModifyType')
def set_ModifyType(self, ModifyType): # String
self.add_query_param('ModifyType', ModifyType)
def get_ComputeResource(self): # String
return self.get_query_params().get('ComputeResource')
def set_ComputeResource(self, ComputeResource): # String
self.add_query_param('ComputeResource', ComputeResource)
def get_ElasticIOResource(self): # Integer
return self.get_query_params().get('ElasticIOResource')
def set_ElasticIOResource(self, ElasticIOResource): # Integer
self.add_query_param('ElasticIOResource', ElasticIOResource) | null |
858 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkadb.endpoint import endpoint_data
class CreateDBClusterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'adb', '2019-03-15', 'CreateDBCluster','ads')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DBClusterDescription(self): # String
return self.get_query_params().get('DBClusterDescription')
def set_DBClusterDescription(self, DBClusterDescription): # String
self.add_query_param('DBClusterDescription', DBClusterDescription)
def get_StorageType(self): # String
return self.get_query_params().get('StorageType')
def set_StorageType(self, StorageType): # String
self.add_query_param('StorageType', StorageType)
def get_Mode(self): # String
return self.get_query_params().get('Mode')
def set_Mode(self, Mode): # String
self.add_query_param('Mode', Mode)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_Period(self): # String
return self.get_query_params().get('Period')
def set_Period(self, Period): # String
self.add_query_param('Period', Period)
def get_BackupSetID(self): # String
return self.get_query_params().get('BackupSetID')
def set_BackupSetID(self, BackupSetID): # String
self.add_query_param('BackupSetID', BackupSetID)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBNodeGroupCount(self): # String
return self.get_query_params().get('DBNodeGroupCount')
def set_DBNodeGroupCount(self, DBNodeGroupCount): # String
self.add_query_param('DBNodeGroupCount', DBNodeGroupCount)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_ComputeResource(self): # String
return self.get_query_params().get('ComputeResource')
def set_ComputeResource(self, ComputeResource): # String
self.add_query_param('ComputeResource', ComputeResource)
def get_ElasticIOResource(self): # String
return self.get_query_params().get('ElasticIOResource')
def set_ElasticIOResource(self, ElasticIOResource): # String
self.add_query_param('ElasticIOResource', ElasticIOResource)
def get_SourceDBInstanceName(self): # String
return self.get_query_params().get('SourceDBInstanceName')
def set_SourceDBInstanceName(self, SourceDBInstanceName): # String
self.add_query_param('SourceDBInstanceName', SourceDBInstanceName)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_StorageResource(self): # String
return self.get_query_params().get('StorageResource')
def set_StorageResource(self, StorageResource): # String
self.add_query_param('StorageResource', StorageResource)
def get_DBClusterCategory(self): # String
return self.get_query_params().get('DBClusterCategory')
def set_DBClusterCategory(self, DBClusterCategory): # String
self.add_query_param('DBClusterCategory', DBClusterCategory)
def get_DBClusterNetworkType(self): # String
return self.get_query_params().get('DBClusterNetworkType')
def set_DBClusterNetworkType(self, DBClusterNetworkType): # String
self.add_query_param('DBClusterNetworkType', DBClusterNetworkType)
def get_RestoreTime(self): # String
return self.get_query_params().get('RestoreTime')
def set_RestoreTime(self, RestoreTime): # String
self.add_query_param('RestoreTime', RestoreTime)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_DBClusterVersion(self): # String
return self.get_query_params().get('DBClusterVersion')
def set_DBClusterVersion(self, DBClusterVersion): # String
self.add_query_param('DBClusterVersion', DBClusterVersion)
def get_DBClusterClass(self): # String
return self.get_query_params().get('DBClusterClass')
def set_DBClusterClass(self, DBClusterClass): # String
self.add_query_param('DBClusterClass', DBClusterClass)
def get_UsedTime(self): # String
return self.get_query_params().get('UsedTime')
def set_UsedTime(self, UsedTime): # String
self.add_query_param('UsedTime', UsedTime)
def get_RestoreType(self): # String
return self.get_query_params().get('RestoreType')
def set_RestoreType(self, RestoreType): # String
self.add_query_param('RestoreType', RestoreType)
def METHOD_NAME(self): # String
return self.get_query_params().get('DBNodeStorage')
def set_DBNodeStorage(self, DBNodeStorage): # String
self.add_query_param('DBNodeStorage', DBNodeStorage)
def get_ExecutorCount(self): # String
return self.get_query_params().get('ExecutorCount')
def set_ExecutorCount(self, ExecutorCount): # String
self.add_query_param('ExecutorCount', ExecutorCount)
def get_VPCId(self): # String
return self.get_query_params().get('VPCId')
def set_VPCId(self, VPCId): # String
self.add_query_param('VPCId', VPCId)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType) | null |
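# Hedged usage sketch (not part of the generated SDK file): repeat-list
# fields such as Tag take a list of dicts that set_Tags expands into
# indexed query keys (Tag.1.Key, Tag.1.Value, ...). Values below are
# placeholders.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient

    client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
    request = CreateDBClusterRequest()
    request.set_PayType("Postpaid")
    request.set_Tags([{"Key": "env", "Value": "dev"}])
    print(client.do_action_with_exception(request))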
859 | from __future__ import annotations
import asyncio
import logging
from typing import TYPE_CHECKING
import click
from alembic import command
from alembic.config import Config
from alembic.runtime.migration import MigrationContext
from alembic.script import ScriptDirectory
from sqlalchemy.engine import Connection, Engine
from ai.backend.common.logging import BraceStyleAdapter
from ..models.alembic import invoked_programmatically
from ..models.base import metadata
from ..models.utils import create_async_engine
if TYPE_CHECKING:
from .context import CLIContext
log = BraceStyleAdapter(logging.getLogger(__spec__.name)) # type: ignore[name-defined]
@click.group()
def cli() -> None:
pass
@cli.command()
@click.option(
"-f",
"--alembic-config",
default="alembic.ini",
metavar="PATH",
help="The path to Alembic config file. [default: alembic.ini]",
)
@click.pass_obj
def show(cli_ctx: CLIContext, alembic_config) -> None:
"""Show the current schema information."""
def _get_current_rev_sync(connection: Connection) -> str | None:
context = MigrationContext.configure(connection)
return context.get_current_revision()
async def METHOD_NAME(sa_url: str) -> None:
invoked_programmatically.set(True)
engine = create_async_engine(sa_url)
async with engine.begin() as connection:
current_rev = await connection.run_sync(_get_current_rev_sync)
script = ScriptDirectory.from_config(alembic_cfg)
heads = script.get_heads()
head_rev = heads[0] if len(heads) > 0 else None
print(f"Current database revision: {current_rev}")
print(f"The head revision of available migrations: {head_rev}")
with cli_ctx.logger:
alembic_cfg = Config(alembic_config)
sa_url = alembic_cfg.get_main_option("sqlalchemy.url")
assert sa_url is not None
sa_url = sa_url.replace("postgresql://", "postgresql+asyncpg://")
asyncio.run(METHOD_NAME(sa_url))
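# Example invocation (hypothetical entry point; assuming this module is
# registered as the ``dbschema`` subcommand of the manager CLI):
#
#   backend.ai mgr dbschema show -f alembic.ini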
@cli.command()
@click.option(
"-f",
"--alembic-config",
default="alembic.ini",
metavar="PATH",
help="The path to Alembic config file. [default: alembic.ini]",
)
@click.pass_obj
def oneshot(cli_ctx: CLIContext, alembic_config) -> None:
"""
Set up your database with one-shot schema migration instead of
iterating over multiple revisions if there is no existing database.
It uses alembic.ini to configure database connection.
Reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
#building-an-up-to-date-database-from-scratch
"""
def _get_current_rev_sync(connection: Connection) -> str | None:
context = MigrationContext.configure(connection)
return context.get_current_revision()
def _create_all_sync(connection: Connection, engine: Engine) -> None:
alembic_cfg.attributes["connection"] = connection
metadata.create_all(engine, checkfirst=False)
log.info("Stamping alembic version to head...")
script = ScriptDirectory.from_config(alembic_cfg)
head_rev = script.get_heads()[0]
connection.exec_driver_sql("CREATE TABLE alembic_version (\nversion_num varchar(32)\n);")
connection.exec_driver_sql(f"INSERT INTO alembic_version VALUES('{head_rev}')")
def _upgrade_sync(connection: Connection) -> None:
alembic_cfg.attributes["connection"] = connection
command.upgrade(alembic_cfg, "head")
async def _oneshot(sa_url: str) -> None:
invoked_programmatically.set(True)
engine = create_async_engine(sa_url)
async with engine.begin() as connection:
await connection.exec_driver_sql('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')
current_rev = await connection.run_sync(_get_current_rev_sync)
if current_rev is None:
# For a fresh clean database, create all from scratch.
# (it will raise error if tables already exist.)
log.info("Detected a fresh new database.")
log.info("Creating tables...")
async with engine.begin() as connection:
await connection.run_sync(_create_all_sync, engine=engine.sync_engine)
else:
# If alembic version info is already available, perform incremental upgrade.
log.info("Detected an existing database.")
log.info("Performing schema upgrade to head...")
async with engine.begin() as connection:
await connection.run_sync(_upgrade_sync)
log.info(
"If you don't need old migrations, delete them and set "
'"down_revision" value in the earliest migration to "None".'
)
with cli_ctx.logger:
alembic_cfg = Config(alembic_config)
sa_url = alembic_cfg.get_main_option("sqlalchemy.url")
assert sa_url is not None
sa_url = sa_url.replace("postgresql://", "postgresql+asyncpg://")
asyncio.run(_oneshot(sa_url)) | null |
860 | # Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.
from sublime import LITERAL
from NeoVintageous.tests import unittest
from NeoVintageous.nv.vi.search import reverse_search_by_pt
class Test_reverse_search_by_pt(unittest.ViewTestCase):
def test_found_literal_returns_region(self):
self.write('abc')
self.assertEqual(self.Region(0, 1), reverse_search_by_pt(self.view, 'a', start=0, end=3, flags=LITERAL))
self.assertEqual(self.Region(1, 2), reverse_search_by_pt(self.view, 'b', start=0, end=3, flags=LITERAL))
self.assertEqual(self.Region(2, 3), reverse_search_by_pt(self.view, 'c', start=0, end=3, flags=LITERAL))
def test_literal_multiline(self):
self.write('abc\ndef\nhij\ndef\nnop')
self.assertEqual(reverse_search_by_pt(self.view, 'a', 0, self.view.size(), LITERAL), self.Region(0, 1))
self.assertEqual(reverse_search_by_pt(self.view, 'b', 0, self.view.size(), LITERAL), self.Region(1, 2))
self.assertEqual(reverse_search_by_pt(self.view, 'd', 0, self.view.size(), LITERAL), self.Region(12, 13))
self.assertEqual(reverse_search_by_pt(self.view, 'd', 0, 11, LITERAL), self.Region(4, 5))
self.assertEqual(reverse_search_by_pt(self.view, 'a', 1, self.view.size(), LITERAL), None)
def METHOD_NAME(self):
self.write('abc')
self.assertEqual(None, reverse_search_by_pt(self.view, 'x', start=0, end=3, flags=LITERAL))
def test_literal_not_found_in_whitespace_returns_none(self):
self.write(' ')
self.assertEqual(None, reverse_search_by_pt(self.view, 'x', start=0, end=1, flags=LITERAL))
def test_literal_start_position_is_characterwise(self):
self.write('aaaxxx')
self.assertEqual(self.Region(2, 3), reverse_search_by_pt(self.view, 'a', start=0, end=6, flags=LITERAL))
self.assertEqual(self.Region(2, 3), reverse_search_by_pt(self.view, 'a', start=1, end=6, flags=LITERAL))
self.assertEqual(self.Region(2, 3), reverse_search_by_pt(self.view, 'a', start=2, end=6, flags=LITERAL))
self.assertEqual(None, reverse_search_by_pt(self.view, 'a', start=3, end=6, flags=LITERAL))
self.assertEqual(None, reverse_search_by_pt(self.view, 'a', start=4, end=6, flags=LITERAL))
self.assertEqual(None, reverse_search_by_pt(self.view, 'a', start=5, end=6, flags=LITERAL))
self.assertEqual(None, reverse_search_by_pt(self.view, 'a', start=6, end=6, flags=LITERAL))
def test_literal_end_position_is_characterwise(self):
self.write('xxxaaa')
self.assertEqual(self.Region(5, 6), reverse_search_by_pt(self.view, 'a', start=0, end=6, flags=LITERAL))
self.assertEqual(self.Region(4, 5), reverse_search_by_pt(self.view, 'a', start=0, end=5, flags=LITERAL))
self.assertEqual(self.Region(3, 4), reverse_search_by_pt(self.view, 'a', start=0, end=4, flags=LITERAL))
self.assertEqual(None, reverse_search_by_pt(self.view, 'a', start=0, end=3, flags=LITERAL))
self.assertEqual(None, reverse_search_by_pt(self.view, 'a', start=0, end=2, flags=LITERAL))
self.assertEqual(None, reverse_search_by_pt(self.view, 'a', start=0, end=1, flags=LITERAL))
self.assertEqual(None, reverse_search_by_pt(self.view, 'a', start=0, end=0, flags=LITERAL))
def test_out_of_bounds(self):
self.normal('ab|c def')
self.assertEqual(reverse_search_by_pt(self.view, 'a', -4, self.view.size()), None)
self.assertEqual(reverse_search_by_pt(self.view, 'a', 5, self.view.size() + 1), None) | null |
861 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeInstanceHistoryEventsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeInstanceHistoryEvents','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_EventIds(self): # RepeatList
return self.get_query_params().get('EventId')
def set_EventIds(self, EventId): # RepeatList
for depth1 in range(len(EventId)):
self.add_query_param('EventId.' + str(depth1 + 1), EventId[depth1])
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_EventCycleStatus(self): # String
return self.get_query_params().get('EventCycleStatus')
def set_EventCycleStatus(self, EventCycleStatus): # String
self.add_query_param('EventCycleStatus', EventCycleStatus)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_ImpactLevel(self): # String
return self.get_query_params().get('ImpactLevel')
def set_ImpactLevel(self, ImpactLevel): # String
self.add_query_param('ImpactLevel', ImpactLevel)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def METHOD_NAME(self): # RepeatList
return self.get_query_params().get('InstanceEventCycleStatus')
def set_InstanceEventCycleStatuss(self, InstanceEventCycleStatus): # RepeatList
for depth1 in range(len(InstanceEventCycleStatus)):
self.add_query_param('InstanceEventCycleStatus.' + str(depth1 + 1), InstanceEventCycleStatus[depth1])
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_EventPublishTimeEnd(self): # String
return self.get_query_params().get('EventPublishTime.End')
def set_EventPublishTimeEnd(self, EventPublishTimeEnd): # String
self.add_query_param('EventPublishTime.End', EventPublishTimeEnd)
def get_ResourceIds(self): # RepeatList
return self.get_query_params().get('ResourceId')
def set_ResourceIds(self, ResourceId): # RepeatList
for depth1 in range(len(ResourceId)):
self.add_query_param('ResourceId.' + str(depth1 + 1), ResourceId[depth1])
def get_InstanceEventTypes(self): # RepeatList
return self.get_query_params().get('InstanceEventType')
def set_InstanceEventTypes(self, InstanceEventType): # RepeatList
for depth1 in range(len(InstanceEventType)):
self.add_query_param('InstanceEventType.' + str(depth1 + 1), InstanceEventType[depth1])
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_NotBeforeStart(self): # String
return self.get_query_params().get('NotBefore.Start')
def set_NotBeforeStart(self, NotBeforeStart): # String
self.add_query_param('NotBefore.Start', NotBeforeStart)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ResourceType(self): # String
return self.get_query_params().get('ResourceType')
def set_ResourceType(self, ResourceType): # String
self.add_query_param('ResourceType', ResourceType)
def get_EventPublishTimeStart(self): # String
return self.get_query_params().get('EventPublishTime.Start')
def set_EventPublishTimeStart(self, EventPublishTimeStart): # String
self.add_query_param('EventPublishTime.Start', EventPublishTimeStart)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_NotBeforeEnd(self): # String
return self.get_query_params().get('NotBefore.End')
def set_NotBeforeEnd(self, NotBeforeEnd): # String
self.add_query_param('NotBefore.End', NotBeforeEnd)
def get_EventType(self): # String
return self.get_query_params().get('EventType')
def set_EventType(self, EventType): # String
self.add_query_param('EventType', EventType) | null |
862 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class CopySnapshotRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'CopySnapshot','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_SnapshotId(self): # String
return self.get_query_params().get('SnapshotId')
def set_SnapshotId(self, SnapshotId): # String
self.add_query_param('SnapshotId', SnapshotId)
def get_DestinationRegionId(self): # String
return self.get_query_params().get('DestinationRegionId')
def set_DestinationRegionId(self, DestinationRegionId): # String
self.add_query_param('DestinationRegionId', DestinationRegionId)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
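	# Example (hypothetical tag values): RepeatList parameters are flattened
	# into 1-based indexed query keys by the loop above:
	#
	#   req = CopySnapshotRequest()
	#   req.set_Tags([{'Key': 'env', 'Value': 'prod'}])
	#   # -> query params Tag.1.Key=env, Tag.1.Value=prod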
def get_Arns(self): # RepeatList
return self.get_query_params().get('Arn')
def set_Arns(self, Arn): # RepeatList
for depth1 in range(len(Arn)):
if Arn[depth1].get('RoleType') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.RoleType', Arn[depth1].get('RoleType'))
if Arn[depth1].get('Rolearn') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.Rolearn', Arn[depth1].get('Rolearn'))
if Arn[depth1].get('AssumeRoleFor') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.AssumeRoleFor', Arn[depth1].get('AssumeRoleFor'))
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DestinationSnapshotName(self): # String
return self.get_query_params().get('DestinationSnapshotName')
def METHOD_NAME(self, DestinationSnapshotName): # String
self.add_query_param('DestinationSnapshotName', DestinationSnapshotName)
def get_DestinationSnapshotDescription(self): # String
return self.get_query_params().get('DestinationSnapshotDescription')
def set_DestinationSnapshotDescription(self, DestinationSnapshotDescription): # String
self.add_query_param('DestinationSnapshotDescription', DestinationSnapshotDescription)
def get_Encrypted(self): # Boolean
return self.get_query_params().get('Encrypted')
def set_Encrypted(self, Encrypted): # Boolean
self.add_query_param('Encrypted', Encrypted)
def get_RetentionDays(self): # Integer
return self.get_query_params().get('RetentionDays')
def set_RetentionDays(self, RetentionDays): # Integer
self.add_query_param('RetentionDays', RetentionDays)
def get_KMSKeyId(self): # String
return self.get_query_params().get('KMSKeyId')
def set_KMSKeyId(self, KMSKeyId): # String
self.add_query_param('KMSKeyId', KMSKeyId)
def get_DestinationStorageLocationArn(self): # String
return self.get_query_params().get('DestinationStorageLocationArn')
def set_DestinationStorageLocationArn(self, DestinationStorageLocationArn): # String
self.add_query_param('DestinationStorageLocationArn', DestinationStorageLocationArn) | null |
863 | #!/usr/bin/env python3
#
# Copyright (c) 2015 - 2023, Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
#
'''
Run any GEOPM appconf with an experiment script. Works with appconfs that
support ``create_appconf(Machine, argv)``, and with experiments that support
``launch(app_conf, args, experiment_cli_args)``. Both appconf and experiment
must also support ``setup_run_args(parser)``.
'''
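# Example invocation (hypothetical script name, experiment/app names, and
# flags; use the ``experiments`` and ``apps`` subcommands to list what is
# actually available):
#
#   ./run.py run monitor dgemm --output-dir ./results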
import apps
import experiment
import argparse
import importlib
import inspect
import os
import pkgutil
from experiment import machine
def list_compatible_apps(args, parent_parser):
"""List the apps that can be launched with this experiment script.
"""
compatible_module_names = list()
all_app_packages = list(
m.name
for m in pkgutil.iter_modules([os.path.dirname(apps.__file__)])
if m.ispkg)
for package_name in all_app_packages:
try:
module = importlib.import_module(f'apps.{package_name}.{package_name}')
except ModuleNotFoundError:
# Do not list apps that we cannot use
continue
global_functions = set(
name for name, function in inspect.getmembers(module, inspect.isfunction))
if global_functions.issuperset({'create_appconf', 'setup_run_args'}):
compatible_module_names.append(package_name)
print('\n'.join(compatible_module_names))
def list_compatible_experiments(args, parent_parser):
"""List the experiments that can be launched with this experiment script.
"""
compatible_module_names = list()
all_experiment_packages = list(
m.name
for m in pkgutil.iter_modules([os.path.dirname(experiment.__file__)])
if m.ispkg)
for package_name in all_experiment_packages:
try:
module = importlib.import_module(f'experiment.{package_name}.{package_name}')
except ModuleNotFoundError:
# Do not list experiments that we cannot use
continue
global_functions = set(
name for name, function in inspect.getmembers(module, inspect.isfunction))
if global_functions.issuperset({'launch', 'setup_run_args'}):
compatible_module_names.append(package_name)
print('\n'.join(compatible_module_names))
def METHOD_NAME(args, parent_parser):
"""Launch an app with the selected experiment type.
"""
app_name = args.application
experiment_name = args.experiment
app_experiment_args = args.app_experiment_args
app_module = importlib.import_module(f'apps.{app_name}.{app_name}')
experiment_module = importlib.import_module(f'experiment.{experiment_name}.{experiment_name}')
parser = argparse.ArgumentParser()
experiment_module.setup_run_args(parser)
app_module.setup_run_args(parser)
known_args, extra_args = parser.parse_known_args(app_experiment_args)
mach = machine.init_output_dir(known_args.output_dir)
app_conf = app_module.create_appconf(mach, known_args)
experiment_module.launch(app_conf=app_conf, args=known_args,
experiment_cli_args=extra_args)
def show_help(args, parent_parser):
"""Show the help message for a given app and/or experiment, or show the
default help message if no app or experiment is provided.
"""
try:
app_name = args.application
experiment_name = args.experiment
except AttributeError:
# Catch the case where the user wants help with the wrapper interface itself
app_name = None
experiment_name = None
parser = argparse.ArgumentParser()
if app_name is not None:
try:
app_module = importlib.import_module(f'apps.{app_name}.{app_name}')
except ModuleNotFoundError:
parent_parser.error(f'Cannot find module: {app_name}')
app_module.setup_run_args(parser)
if experiment_name is not None:
try:
experiment_module = importlib.import_module(f'experiment.{experiment_name}.{experiment_name}')
except ModuleNotFoundError:
parent_parser.error(f'Cannot find module: {experiment_name}')
experiment_module.setup_run_args(parser)
if experiment_name is not None or app_name is not None:
# Either app, experiment, or both were provided. Show the help message
# from that (or those) module(s).
parser.print_help()
else:
# No app or experiment was provided. Show the help message for this CLI
parent_parser.print_help()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.set_defaults(func=show_help)
subparsers = parser.add_subparsers()
parser_list_apps = subparsers.add_parser(
'apps', help='List the applications can run with this CLI')
parser_list_apps.set_defaults(func=list_compatible_apps)
parser_list_experiments = subparsers.add_parser(
'experiments', help='List the experiments can run with this CLI')
parser_list_experiments.set_defaults(func=list_compatible_experiments)
parser_run = subparsers.add_parser(
'run', help='Run an application with the selected experiment type')
parser_run.add_argument('experiment', help='Name of an experiment to run')
parser_run.add_argument('application', help='Name of an application to run')
parser_run.add_argument('app_experiment_args',
nargs=argparse.REMAINDER,
help='Arguments for the experiment and app.')
parser_run.set_defaults(func=METHOD_NAME)
parser_help = subparsers.add_parser(
'help', help='Show help for an application with the experiment')
parser_help.add_argument('--experiment', help='Name of an experiment')
parser_help.add_argument('--application', help='Name of an application')
parser_help.set_defaults(func=show_help)
args = parser.parse_args()
if args.func is not None:
args.func(args, parser) | null |
864 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkalb.endpoint import endpoint_data
class CreateHealthCheckTemplateRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Alb', '2020-06-16', 'CreateHealthCheckTemplate','alb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_HealthCheckTimeout(self): # Integer
return self.get_query_params().get('HealthCheckTimeout')
def set_HealthCheckTimeout(self, HealthCheckTimeout): # Integer
self.add_query_param('HealthCheckTimeout', HealthCheckTimeout)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_HealthCheckProtocol(self): # String
return self.get_query_params().get('HealthCheckProtocol')
def set_HealthCheckProtocol(self, HealthCheckProtocol): # String
self.add_query_param('HealthCheckProtocol', HealthCheckProtocol)
def get_UnhealthyThreshold(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_HealthCheckPath(self): # String
return self.get_query_params().get('HealthCheckPath')
def set_HealthCheckPath(self, HealthCheckPath): # String
self.add_query_param('HealthCheckPath', HealthCheckPath)
def get_HealthCheckCodes(self): # Array
return self.get_query_params().get('HealthCheckCodes')
def set_HealthCheckCodes(self, HealthCheckCodes): # Array
for index1, value1 in enumerate(HealthCheckCodes):
self.add_query_param('HealthCheckCodes.' + str(index1 + 1), value1)
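	# Example (hypothetical codes): Array parameters likewise become 1-based
	# indexed keys, e.g. set_HealthCheckCodes(['http_2xx', 'http_3xx']) adds
	# HealthCheckCodes.1=http_2xx and HealthCheckCodes.2=http_3xx.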
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_HealthCheckMethod(self): # String
return self.get_query_params().get('HealthCheckMethod')
def set_HealthCheckMethod(self, HealthCheckMethod): # String
self.add_query_param('HealthCheckMethod', HealthCheckMethod)
def get_HealthCheckHost(self): # String
return self.get_query_params().get('HealthCheckHost')
def set_HealthCheckHost(self, HealthCheckHost): # String
self.add_query_param('HealthCheckHost', HealthCheckHost)
def get_HealthCheckInterval(self): # Integer
return self.get_query_params().get('HealthCheckInterval')
def set_HealthCheckInterval(self, HealthCheckInterval): # Integer
self.add_query_param('HealthCheckInterval', HealthCheckInterval)
def get_HealthCheckTemplateName(self): # String
return self.get_query_params().get('HealthCheckTemplateName')
def set_HealthCheckTemplateName(self, HealthCheckTemplateName): # String
self.add_query_param('HealthCheckTemplateName', HealthCheckTemplateName)
def get_HealthCheckHttpVersion(self): # String
return self.get_query_params().get('HealthCheckHttpVersion')
def set_HealthCheckHttpVersion(self, HealthCheckHttpVersion): # String
self.add_query_param('HealthCheckHttpVersion', HealthCheckHttpVersion)
def METHOD_NAME(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort) | null |
865 | from typing import Any, AnyStr, Callable, ContextManager, Generic, IO, Iterable, Iterator, List, Optional, Text, Type, Union
from typing_extensions import Final, Literal
import os
import sys
class _FNMatcher(Generic[AnyStr]):
pattern: AnyStr = ...
def __init__(self, pattern: AnyStr) -> None: ...
def __call__(self, path: local) -> bool: ...
class _Stat:
path: Final[local] = ...
mode: Final[int]
ino: Final[int]
dev: Final[int]
nlink: Final[int]
uid: Final[int]
gid: Final[int]
size: Final[int]
atime: Final[float]
mtime: Final[float]
ctime: Final[float]
atime_ns: Final[int]
mtime_ns: Final[int]
ctime_ns: Final[int]
if sys.version_info >= (3, 8) and sys.platform == "win32":
reparse_tag: Final[int]
blocks: Final[int]
blksize: Final[int]
rdev: Final[int]
flags: Final[int]
gen: Final[int]
birthtime: Final[int]
rsize: Final[int]
creator: Final[int]
type: Final[int]
if sys.platform != 'win32':
@property
def owner(self) -> str: ...
@property
def group(self) -> str: ...
def isdir(self) -> bool: ...
def isfile(self) -> bool: ...
def islink(self) -> bool: ...
if sys.version_info >= (3, 6):
_PathLike = os.PathLike
else:
class _PathLike(Generic[AnyStr]):
def __fspath__(self) -> AnyStr: ...
_PathType = Union[bytes, Text, _PathLike[str], _PathLike[bytes], local]
class local(_PathLike[str]):
class ImportMismatchError(ImportError): ...
sep: Final[str]
strpath: Final[str]
def __init__(self, path: _PathType = ..., expanduser: bool = ...) -> None: ...
def __hash__(self) -> int: ...
def __eq__(self, other: object) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __lt__(self, other: object) -> bool: ...
def __gt__(self, other: object) -> bool: ...
def __add__(self, other: object) -> local: ...
def __cmp__(self, other: object) -> int: ...
def __div__(self, other: _PathType) -> local: ...
def __truediv__(self, other: _PathType) -> local: ...
def __fspath__(self) -> str: ...
@classmethod
def get_temproot(cls) -> local: ...
@classmethod
def make_numbered_dir(
cls,
prefix: str = ...,
rootdir: Optional[local] = ...,
keep: Optional[int] = ...,
lock_timeout: int = ...,
) -> local: ...
@classmethod
def mkdtemp(cls, rootdir: Optional[local] = ...) -> local: ...
@classmethod
def sysfind(
cls,
name: _PathType,
checker: Optional[Callable[[local], bool]] = ...,
paths: Optional[Iterable[_PathType]] = ...,
) -> Optional[local]: ...
@property
def basename(self) -> str: ...
@property
def dirname(self) -> str: ...
@property
def purebasename(self) -> str: ...
@property
def ext(self) -> str: ...
def as_cwd(self) -> ContextManager[Optional[local]]: ...
def atime(self) -> float: ...
def bestrelpath(self, dest: local) -> str: ...
def chdir(self) -> local: ...
def check(
self,
*,
basename: int = ..., notbasename: int = ...,
basestarts: int = ..., notbasestarts: int = ...,
dir: int = ..., notdir: int = ...,
dotfile: int = ..., notdotfile: int = ...,
endswith: int = ..., notendswith: int = ...,
exists: int = ..., notexists: int = ...,
ext: int = ..., notext: int = ...,
file: int = ..., notfile: int = ...,
fnmatch: int = ..., notfnmatch: int = ...,
link: int = ..., notlink: int = ...,
relto: int = ..., notrelto: int = ...,
) -> bool: ...
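    # Informational example (stubs are never executed): check() tests path
    # properties by keyword, e.g. local('/tmp/x.txt').check(file=1, exists=1)
    # returns a bool.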
def chmod(self, mode: int, rec: Union[int, str, Text, Callable[[local], bool]] = ...) -> None: ...
if sys.platform != 'win32':
def chown(self, user: Union[int, str], group: Union[int, str], rec: int = ...) -> None: ...
def common(self, other: local) -> Optional[local]: ...
def computehash(self, hashtype: str = ..., chunksize: int = ...) -> str: ...
def copy(self, target: local, mode: bool = ..., stat: bool = ...) -> None: ...
def dirpath(self, *args: _PathType, abs: int = ...) -> local: ...
def dump(self, obj: Any, bin: Optional[int] = ...) -> None: ...
def ensure(self, *args: _PathType, dir: int = ...) -> local: ...
def ensure_dir(self, *args: _PathType) -> local: ...
def exists(self) -> bool: ...
    def fnmatch(self, pattern: str) -> _FNMatcher: ...
def isdir(self) -> bool: ...
def isfile(self) -> bool: ...
def islink(self) -> bool: ...
def join(self, *args: _PathType, abs: int = ...) -> local: ...
def listdir(
self,
fil: Optional[Union[str, Text, Callable[[local], bool]]] = ...,
sort: Optional[bool] = ...,
) -> List[local]: ...
def load(self) -> Any: ...
def lstat(self) -> _Stat: ...
def mkdir(self, *args: _PathType) -> local: ...
if sys.platform != 'win32':
def mklinkto(self, oldname: Union[str, local]) -> None: ...
def mksymlinkto(self, value: local, absolute: int = ...) -> None: ...
def move(self, target: local) -> None: ...
def mtime(self) -> float: ...
def new(
self,
*,
drive: str = ...,
dirname: str = ...,
basename: str = ...,
purebasename: str = ...,
ext: str = ...,
) -> local: ...
def open(self, mode: str = ..., ensure: bool = ..., encoding: Optional[str] = ...) -> IO[Any]: ...
def parts(self, reverse: bool = ...) -> List[local]: ...
def pyimport(
self,
modname: Optional[str] = ...,
ensuresyspath: Union[bool, Literal["append", "importlib"]] = ...,
) -> Any: ...
def METHOD_NAME(self) -> Optional[local]: ...
def read(self, mode: str = ...) -> Union[Text, bytes]: ...
def read_binary(self) -> bytes: ...
def read_text(self, encoding: str) -> Text: ...
def readlines(self, cr: int = ...) -> List[str]: ...
if sys.platform != 'win32':
def readlink(self) -> str: ...
def realpath(self) -> local: ...
def relto(self, relpath: Union[str, local]) -> str: ...
def remove(self, rec: int = ..., ignore_errors: bool = ...) -> None: ...
def rename(self, target: _PathType) -> None: ...
def samefile(self, other: _PathType) -> bool: ...
def setmtime(self, mtime: Optional[float] = ...) -> None: ...
def size(self) -> int: ...
def stat(self, raising: bool = ...) -> _Stat: ...
def sysexec(self, *argv: Any, **popen_opts: Any) -> Text: ...
def visit(
self,
fil: Optional[Union[str, Text, Callable[[local], bool]]] = ...,
rec: Optional[Union[Literal[1, True], str, Text, Callable[[local], bool]]] = ...,
ignore: Type[Exception] = ...,
bf: bool = ...,
sort: bool = ...,
) -> Iterator[local]: ...
def write(self, data: Any, mode: str = ..., ensure: bool = ...) -> None: ...
def write_binary(self, data: bytes, ensure: bool = ...) -> None: ...
def write_text(self, data: Union[str, Text], encoding: str, ensure: bool = ...) -> None: ...
# Untyped types below here.
svnwc: Any
svnurl: Any
SvnAuth: Any | null |
866 | # SPDX-FileCopyrightText: 2021 Melissa LeBlanc-Williams for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""Based on https://raw.githubusercontent.com/micropython/micropython-lib/cfa1b9cce0c93a3115bbff3886c9bbcddd9e8047/unittest/unittest.py """
import sys
class SkipTest(Exception):
pass
raiseException = False
raiseBaseException = True
class AssertRaisesContext:
def __init__(self, exc):
self.expected = exc
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
assert False, "%r not raised" % self.expected
if issubclass(exc_type, self.expected):
return True
return False
class TestCase:
def fail(self, msg=""):
assert False, msg
def assertEqual(self, x, y, msg=""):
if not msg:
msg = "%r vs (expected) %r" % (x, y)
assert x == y, msg
def assertNotEqual(self, x, y, msg=""):
if not msg:
msg = "%r not expected to be equal %r" % (x, y)
assert x != y, msg
def assertAlmostEqual(self, x, y, places=None, msg="", delta=None):
if x == y:
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(x - y) <= delta:
return
if not msg:
msg = "%r != %r within %r delta" % (x, y, delta)
else:
if places is None:
places = 7
if round(abs(y - x), places) == 0:
return
if not msg:
msg = "%r != %r within %r places" % (x, y, places)
assert False, msg
def assertNotAlmostEqual(self, x, y, places=None, msg="", delta=None):
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (x == y) and abs(x - y) > delta:
return
if not msg:
msg = "%r == %r within %r delta" % (x, y, delta)
else:
if places is None:
places = 7
if not (x == y) and round(abs(y - x), places) != 0:
return
if not msg:
msg = "%r == %r within %r places" % (x, y, places)
assert False, msg
def assertIs(self, x, y, msg=""):
if not msg:
msg = "%r is not %r" % (x, y)
assert x is y, msg
def assertIsNot(self, x, y, msg=""):
if not msg:
msg = "%r is %r" % (x, y)
assert x is not y, msg
def assertIsNone(self, x, msg=""):
if not msg:
msg = "%r is not None" % x
assert x is None, msg
def assertIsNotNone(self, x, msg=""):
if not msg:
msg = "%r is None" % x
assert x is not None, msg
def assertTrue(self, x, msg=""):
if not msg:
msg = "Expected %r to be True" % x
assert x, msg
def assertFalse(self, x, msg=""):
if not msg:
msg = "Expected %r to be False" % x
assert not x, msg
def assertIn(self, x, y, msg=""):
if not msg:
msg = "Expected %r to be in %r" % (x, y)
assert x in y, msg
def assertIsInstance(self, x, y, msg=""):
assert isinstance(x, y), msg
def assertRaises(self, exc, func=None, *args, **kwargs):
if func is None:
return AssertRaisesContext(exc)
try:
func(*args, **kwargs)
assert False, "%r not raised" % exc
except Exception as e:
if isinstance(e, exc):
return
raise
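# Usage sketch (hypothetical test case): subclass TestCase, name test methods
# with a ``test`` prefix, and run them via main():
#
#   class MyTest(TestCase):
#       def test_add(self):
#           self.assertEqual(1 + 1, 2)
#
#   main()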
def skip(msg):
def METHOD_NAME(fun):
# We just replace original fun with _inner
def _inner(self):
raise SkipTest(msg)
return _inner
return METHOD_NAME
def skipIf(cond, msg):
if not cond:
return lambda x: x
return skip(msg)
def skipUnless(cond, msg):
if cond:
return lambda x: x
return skip(msg)
class TestSuite:
def __init__(self):
self.tests = []
def addTest(self, cls):
self.tests.append(cls)
class TestRunner:
def run(self, suite):
res = TestResult()
for c in suite.tests:
run_class(c, res)
print("Ran %d tests\n" % res.testsRun)
if res.failuresNum > 0 or res.errorsNum > 0:
print("FAILED (failures=%d, errors=%d)" % (res.failuresNum, res.errorsNum))
else:
msg = "OK"
if res.skippedNum > 0:
msg += " (%d skipped)" % res.skippedNum
print(msg)
return res
class TestResult:
def __init__(self):
self.errorsNum = 0
self.failuresNum = 0
self.skippedNum = 0
self.testsRun = 0
def wasSuccessful(self):
return self.errorsNum == 0 and self.failuresNum == 0
# TODO: Uncompliant
def run_class(c, test_result):
o = c()
set_up = getattr(o, "setUp", lambda: None)
tear_down = getattr(o, "tearDown", lambda: None)
for name in dir(o):
if name.startswith("test"):
print("%s (%s) ..." % (name, c.__qualname__), end="")
m = getattr(o, name)
set_up()
try:
test_result.testsRun += 1
m()
print(" ok")
except SkipTest as e:
print(" skipped:", e.args[0])
test_result.skippedNum += 1
except Exception as e: # user exception
print(" FAIL")
if raiseException:
raise
else:
print(e)
test_result.failuresNum += 1
continue
except BaseException as e: # system exception
print(" FAIL")
if raiseBaseException:
raise
else:
print(e)
test_result.failuresNum += 1
continue
finally:
tear_down()
def main(module="__main__"):
def test_cases(m):
for tn in dir(m):
c = getattr(m, tn)
if (
isinstance(c, object)
and isinstance(c, type)
and issubclass(c, TestCase)
):
yield c
m = __import__(module) # changed to permit non-top-level testing modules
suite = TestSuite()
for c in test_cases(m):
suite.addTest(c)
runner = TestRunner()
result = runner.run(suite) | null |
867 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class CreateDdrInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'CreateDdrInstance')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DBInstanceStorage(self): # Integer
return self.get_query_params().get('DBInstanceStorage')
def set_DBInstanceStorage(self, DBInstanceStorage): # Integer
self.add_query_param('DBInstanceStorage', DBInstanceStorage)
def get_SystemDBCharset(self): # String
return self.get_query_params().get('SystemDBCharset')
def set_SystemDBCharset(self, SystemDBCharset): # String
self.add_query_param('SystemDBCharset', SystemDBCharset)
def get_EngineVersion(self): # String
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self, EngineVersion): # String
self.add_query_param('EngineVersion', EngineVersion)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_DBInstanceDescription(self): # String
return self.get_query_params().get('DBInstanceDescription')
def set_DBInstanceDescription(self, DBInstanceDescription): # String
self.add_query_param('DBInstanceDescription', DBInstanceDescription)
def get_Period(self): # String
return self.get_query_params().get('Period')
def set_Period(self, Period): # String
self.add_query_param('Period', Period)
def get_BackupSetId(self): # String
return self.get_query_params().get('BackupSetId')
def set_BackupSetId(self, BackupSetId): # String
self.add_query_param('BackupSetId', BackupSetId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def METHOD_NAME(self): # String
return self.get_query_params().get('DBInstanceClass')
def set_DBInstanceClass(self, DBInstanceClass): # String
self.add_query_param('DBInstanceClass', DBInstanceClass)
def get_SecurityIPList(self): # String
return self.get_query_params().get('SecurityIPList')
def set_SecurityIPList(self, SecurityIPList): # String
self.add_query_param('SecurityIPList', SecurityIPList)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_PrivateIpAddress(self): # String
return self.get_query_params().get('PrivateIpAddress')
def set_PrivateIpAddress(self, PrivateIpAddress): # String
self.add_query_param('PrivateIpAddress', PrivateIpAddress)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_InstanceNetworkType(self): # String
return self.get_query_params().get('InstanceNetworkType')
def set_InstanceNetworkType(self, InstanceNetworkType): # String
self.add_query_param('InstanceNetworkType', InstanceNetworkType)
def get_ConnectionMode(self): # String
return self.get_query_params().get('ConnectionMode')
def set_ConnectionMode(self, ConnectionMode): # String
self.add_query_param('ConnectionMode', ConnectionMode)
def get_SourceDBInstanceName(self): # String
return self.get_query_params().get('SourceDBInstanceName')
def set_SourceDBInstanceName(self, SourceDBInstanceName): # String
self.add_query_param('SourceDBInstanceName', SourceDBInstanceName)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Engine(self): # String
return self.get_query_params().get('Engine')
def set_Engine(self, Engine): # String
self.add_query_param('Engine', Engine)
def get_DBInstanceStorageType(self): # String
return self.get_query_params().get('DBInstanceStorageType')
def set_DBInstanceStorageType(self, DBInstanceStorageType): # String
self.add_query_param('DBInstanceStorageType', DBInstanceStorageType)
def get_DBInstanceNetType(self): # String
return self.get_query_params().get('DBInstanceNetType')
def set_DBInstanceNetType(self, DBInstanceNetType): # String
self.add_query_param('DBInstanceNetType', DBInstanceNetType)
def get_RestoreTime(self): # String
return self.get_query_params().get('RestoreTime')
def set_RestoreTime(self, RestoreTime): # String
self.add_query_param('RestoreTime', RestoreTime)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_UsedTime(self): # String
return self.get_query_params().get('UsedTime')
def set_UsedTime(self, UsedTime): # String
self.add_query_param('UsedTime', UsedTime)
def get_RestoreType(self): # String
return self.get_query_params().get('RestoreType')
def set_RestoreType(self, RestoreType): # String
self.add_query_param('RestoreType', RestoreType)
def get_VPCId(self): # String
return self.get_query_params().get('VPCId')
def set_VPCId(self, VPCId): # String
self.add_query_param('VPCId', VPCId)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType)
def get_SourceRegion(self): # String
return self.get_query_params().get('SourceRegion')
def set_SourceRegion(self, SourceRegion): # String
self.add_query_param('SourceRegion', SourceRegion) | null |
868 | import sys
import os
import ldap3
try:
    from django.contrib.auth import get_user_model
    from django.conf import settings
    CCC_PASS = settings.CCC_PASS
except Exception:
    # there's a chance we won't get some results, but it still goes through
    CCC_PASS = None
NAME_LENGTH = 30
# the size of first_name and last_name fields
server_pool = ldap3.ServerPool(('ldaps://ldapv2.wpi.edu', 'ldaps://vmldapalt.wpi.edu', 'ldaps://ldapv2back.wpi.edu'), pool_strategy=ldap3.FIRST, active=True, exhaust=True)
def METHOD_NAME():
conn_args = {}
conn_args['client_strategy'] = ldap3.SYNC
conn_args['read_only'] = True
conn_args['raise_exceptions'] = True
if CCC_PASS:
conn_args['user'] = 'wpieduPersonUUID=a7188b7da454ce4e2396e0e09abd3333,ou=People,dc=WPI,dc=EDU' # ie. the lnl CCC account dn
conn_args['password'] = CCC_PASS
return conn_args
def search_users(q):
ldap_q = "(& " + "".join(map(lambda tok: "(|(uid=%s*)(givenName=%s*)(sn=%s*))" % (tok, tok, tok), q.split(" "))) + ")"
conn_args = METHOD_NAME()
with ldap3.Connection(server_pool, **conn_args) as conn:
conn.search(search_base='ou=People,dc=wpi,dc=edu', search_filter=ldap_q, search_scope=ldap3.LEVEL, attributes=('givenName', 'sn', 'mail', 'uid', 'wpieduPersonClass'), paged_size=15)
resp = conn.response
return resp
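# For example (hypothetical query), q = "john smith" builds the LDAP filter
# (the two token groups are joined with no separator):
#   (& (|(uid=john*)(givenName=john*)(sn=john*))(|(uid=smith*)(givenName=smith*)(sn=smith*)))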
def search_or_create_users(q):
ldap_resp = search_users(q)
objs = []
for ldap_u in ldap_resp:
ldap_u = ldap_u['attributes']
if 'uid' not in ldap_u:
continue
try:
class_year = ldap_u.get('wpieduPersonClass', [None])[0]
except IndexError:
class_year = None
try:
class_year = int(class_year)
except (ValueError, TypeError):
class_year = None
given_name = ldap_u.get('givenName', [''])
given_name.append('')
last_name = ldap_u.get('sn', [''])
last_name.append('')
u, created = get_user_model().objects.get_or_create(
username=ldap_u['uid'][0],
defaults={
'email': ldap_u.get('mail', [False])[0] or ldap_u['uid'][0] + "@wpi.edu",
'first_name': given_name[0][0:NAME_LENGTH - 1],
'last_name': last_name[0][0:NAME_LENGTH - 1],
'class_year': class_year,
}
)
objs.append(u)
return objs
def fill_in_user(user):
if user.first_name and user.last_name:
return user
conn_args = METHOD_NAME()
with ldap3.Connection(server_pool, **conn_args) as conn:
conn.search(search_base='ou=People,dc=wpi,dc=edu', search_filter=("(uid=%s)" % user.username), search_scope=ldap3.LEVEL, attributes=('givenName', 'sn', 'mail', 'wpieduPersonClass'), paged_size=1)
resp = conn.response
if len(resp):
resp = resp[0]['attributes']
if not user.first_name:
user.first_name = resp.get('givenName', [''])[0][0:NAME_LENGTH - 1]
if not user.last_name:
user.last_name = resp.get('sn', [''])[0][0:NAME_LENGTH - 1]
if not user.email:
            user.email = resp.get('mail', [False])[0] or user.username + "@wpi.edu"
if not user.class_year:
try:
class_year = resp.get('wpieduPersonClass', [None])[0]
except IndexError:
class_year = None
try:
class_year = int(class_year)
except (ValueError, TypeError):
class_year = None
if class_year:
user.class_year = class_year
return user
def get_student_id(username):
"""
Obtain a user's Student ID number from the server (if tied into the WPI network).
:param username: The user's username (WPI network username)
:return: Student ID number
"""
try:
uid = os.popen('id -u ' + username).read().replace('\n', '')
if uid not in ['', None]:
return uid
    except Exception:
print('Unable to obtain id for ' + username)
return None
def search_with_id(student_id):
"""
Obtain the username for a user with a given Student ID number (if server is tied into WPI network).
:param student_id: Student ID number to use in the search
:return: The user's network username
"""
try:
username = os.popen('id +' + str(student_id) + ' -un').read().replace('\n', '')
if username not in ['', None]:
return username
    except Exception:
pass
return None
if __name__ == "__main__":
if len(sys.argv) == 1:
print("No argument")
exit()
print(search_or_create_users(" ".join(sys.argv[1:]))) | null |
869 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RoaRequest
from aliyunsdkcs.endpoint import endpoint_data
class ScaleOutClusterRequest(RoaRequest):
def __init__(self):
RoaRequest.__init__(self, 'CS', '2015-12-15', 'ScaleOutCluster')
self.set_uri_pattern('/api/v2/clusters/[ClusterId]')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_worker_data_disk(self):
return self.get_body_params().get('worker_data_disk')
def set_worker_data_disk(self,worker_data_disk):
self.add_body_params('worker_data_disk', worker_data_disk)
def get_key_pair(self):
return self.get_body_params().get('key_pair')
def set_key_pair(self,key_pair):
self.add_body_params('key_pair', key_pair)
def get_count(self):
return self.get_body_params().get('count')
def set_count(self,count):
self.add_body_params('count', count)
def get_worker_system_disk_category(self):
return self.get_body_params().get('worker_system_disk_category')
def set_worker_system_disk_category(self,worker_system_disk_category):
self.add_body_params('worker_system_disk_category', worker_system_disk_category)
def get_cloud_monitor_flags(self):
return self.get_body_params().get('cloud_monitor_flags')
def set_cloud_monitor_flags(self,cloud_monitor_flags):
self.add_body_params('cloud_monitor_flags', cloud_monitor_flags)
def get_ClusterId(self):
return self.get_path_params().get('ClusterId')
def set_ClusterId(self,ClusterId):
self.add_path_param('ClusterId',ClusterId)
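	# The [ClusterId] segment of the URI pattern above is substituted with this
	# path parameter, e.g. set_ClusterId('c-123abc') (hypothetical id) yields
	# POST /api/v2/clusters/c-123abc.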
def get_user_data(self):
return self.get_body_params().get('user_data')
def set_user_data(self,user_data):
self.add_body_params('user_data', user_data)
def get_worker_period_unit(self):
return self.get_body_params().get('worker_period_unit')
def set_worker_period_unit(self,worker_period_unit):
self.add_body_params('worker_period_unit', worker_period_unit)
def get_worker_auto_renew(self):
return self.get_body_params().get('worker_auto_renew')
def set_worker_auto_renew(self,worker_auto_renew):
self.add_body_params('worker_auto_renew', worker_auto_renew)
def get_worker_auto_renew_period(self):
return self.get_body_params().get('worker_auto_renew_period')
def set_worker_auto_renew_period(self,worker_auto_renew_period):
self.add_body_params('worker_auto_renew_period', worker_auto_renew_period)
def get_worker_period(self):
return self.get_body_params().get('worker_period')
def METHOD_NAME(self,worker_period):
self.add_body_params('worker_period', worker_period)
def get_login_password(self):
return self.get_body_params().get('login_password')
def set_login_password(self,login_password):
self.add_body_params('login_password', login_password)
def get_worker_system_disk_size(self):
return self.get_body_params().get('worker_system_disk_size')
def set_worker_system_disk_size(self,worker_system_disk_size):
self.add_body_params('worker_system_disk_size', worker_system_disk_size)
def get_cpu_policy(self):
return self.get_body_params().get('cpu_policy')
def set_cpu_policy(self,cpu_policy):
self.add_body_params('cpu_policy', cpu_policy)
def get_disable_rollback(self):
return self.get_body_params().get('disable_rollback')
def set_disable_rollback(self,disable_rollback):
self.add_body_params('disable_rollback', disable_rollback)
def get_image_id(self):
return self.get_body_params().get('image_id')
def set_image_id(self,image_id):
self.add_body_params('image_id', image_id)
def get_worker_instance_charge_type(self):
return self.get_body_params().get('worker_instance_charge_type')
def set_worker_instance_charge_type(self,worker_instance_charge_type):
		self.add_body_params('worker_instance_charge_type', worker_instance_charge_type) | null
870 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkslb.endpoint import endpoint_data
class CreateLoadBalancerUDPListenerRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Slb', '2014-05-15', 'CreateLoadBalancerUDPListener','slb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_AclStatus(self): # String
return self.get_query_params().get('AclStatus')
def set_AclStatus(self, AclStatus): # String
self.add_query_param('AclStatus', AclStatus)
def get_AclType(self): # String
return self.get_query_params().get('AclType')
def set_AclType(self, AclType): # String
self.add_query_param('AclType', AclType)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_VServerGroupId(self): # String
return self.get_query_params().get('VServerGroupId')
def set_VServerGroupId(self, VServerGroupId): # String
self.add_query_param('VServerGroupId', VServerGroupId)
def get_AclId(self): # String
return self.get_query_params().get('AclId')
def set_AclId(self, AclId): # String
self.add_query_param('AclId', AclId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_MasterSlaveServerGroupId(self): # String
return self.get_query_params().get('MasterSlaveServerGroupId')
def set_MasterSlaveServerGroupId(self, MasterSlaveServerGroupId): # String
self.add_query_param('MasterSlaveServerGroupId', MasterSlaveServerGroupId)
def get_healthCheckReq(self): # String
return self.get_query_params().get('healthCheckReq')
def set_healthCheckReq(self, healthCheckReq): # String
self.add_query_param('healthCheckReq', healthCheckReq)
def get_BackendServerPort(self): # Integer
return self.get_query_params().get('BackendServerPort')
def set_BackendServerPort(self, BackendServerPort): # Integer
self.add_query_param('BackendServerPort', BackendServerPort)
def get_healthCheckInterval(self): # Integer
return self.get_query_params().get('healthCheckInterval')
def set_healthCheckInterval(self, healthCheckInterval): # Integer
self.add_query_param('healthCheckInterval', healthCheckInterval)
def get_healthCheckExp(self): # String
return self.get_query_params().get('healthCheckExp')
def set_healthCheckExp(self, healthCheckExp): # String
self.add_query_param('healthCheckExp', healthCheckExp)
def get_ProxyProtocolV2Enabled(self): # Boolean
return self.get_query_params().get('ProxyProtocolV2Enabled')
def set_ProxyProtocolV2Enabled(self, ProxyProtocolV2Enabled): # Boolean
self.add_query_param('ProxyProtocolV2Enabled', ProxyProtocolV2Enabled)
def get_HealthCheckSwitch(self): # String
return self.get_query_params().get('HealthCheckSwitch')
def set_HealthCheckSwitch(self, HealthCheckSwitch): # String
self.add_query_param('HealthCheckSwitch', HealthCheckSwitch)
def get_HealthCheckConnectTimeout(self): # Integer
return self.get_query_params().get('HealthCheckConnectTimeout')
def set_HealthCheckConnectTimeout(self, HealthCheckConnectTimeout): # Integer
self.add_query_param('HealthCheckConnectTimeout', HealthCheckConnectTimeout)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_UnhealthyThreshold(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def get_ListenerPort(self): # Integer
return self.get_query_params().get('ListenerPort')
def set_ListenerPort(self, ListenerPort): # Integer
self.add_query_param('ListenerPort', ListenerPort)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_Bandwidth(self): # Integer
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self, Bandwidth): # Integer
self.add_query_param('Bandwidth', Bandwidth)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
	def get_HealthCheckConnectPort(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort) | null |
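A minimal usage sketch for the request class above (editor-added, not part of the dataset row): it assumes the standard aliyun-python-sdk-core AcsClient, and the credentials, region, and IDs are placeholders.

from aliyunsdkcore.client import AcsClient

# Placeholder credentials and region; real values are required.
client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')

request = CreateLoadBalancerUDPListenerRequest()
request.set_LoadBalancerId('lb-xxxxxxxx')   # placeholder load balancer ID
request.set_ListenerPort(53)
request.set_BackendServerPort(53)
response = client.do_action_with_exception(request)  # raw JSON response bytes
print(response)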
871 | #!/usr/bin/env python3
# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC help output."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from collections import defaultdict
import os
import re
def parse_string(s):
assert s[0] == '"'
assert s[-1] == '"'
return s[1:-1]
def process_mapping(fname):
"""Find and parse conversion table in implementation file `fname`."""
cmds = []
in_rpcs = False
with open(fname, "r", encoding="utf8") as f:
for line in f:
line = line.rstrip()
if not in_rpcs:
if line == 'static const CRPCConvertParam vRPCConvertParams[] =':
in_rpcs = True
else:
if line.startswith('};'):
in_rpcs = False
elif '{' in line and '"' in line:
m = re.search(r'{ *("[^"]*"), *([0-9]+) *, *("[^"]*") *},', line)
assert m, 'No match to table expression: %s' % line
name = parse_string(m.group(1))
idx = int(m.group(2))
argname = parse_string(m.group(3))
cmds.append((name, idx, argname))
assert not in_rpcs and cmds
return cmds
class HelpRpcTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.supports_cli = False
def run_test(self):
self.test_client_conversion_table()
        self.test_categories()
self.dump_help()
if self.is_wallet_compiled():
self.wallet_help()
def test_client_conversion_table(self):
file_conversion_table = os.path.join(self.config["environment"]["SRCDIR"], 'src', 'rpc', 'client.cpp')
mapping_client = process_mapping(file_conversion_table)
# Ignore echojson in client table
mapping_client = [m for m in mapping_client if m[0] != 'echojson']
mapping_server = self.nodes[0].help("dump_all_command_conversions")
# Filter all RPCs whether they need conversion
mapping_server_conversion = [tuple(m[:3]) for m in mapping_server if not m[3]]
# Only check if all RPC methods have been compiled (i.e. wallet is enabled)
if self.is_wallet_compiled() and sorted(mapping_client) != sorted(mapping_server_conversion):
raise AssertionError("RPC client conversion table ({}) and RPC server named arguments mismatch!\n{}".format(
file_conversion_table,
set(mapping_client).symmetric_difference(mapping_server_conversion),
))
# Check for conversion difference by argument name.
# It is preferable for API consistency that arguments with the same name
# have the same conversion, so bin by argument name.
all_methods_by_argname = defaultdict(list)
converts_by_argname = defaultdict(list)
for m in mapping_server:
all_methods_by_argname[m[2]].append(m[0])
converts_by_argname[m[2]].append(m[3])
for argname, convert in converts_by_argname.items():
if all(convert) != any(convert):
# Only allow dummy to fail consistency check
assert argname == 'dummy', ('WARNING: conversion mismatch for argument named %s (%s)' % (argname, list(zip(all_methods_by_argname[argname], converts_by_argname[argname]))))
    def test_categories(self):
node = self.nodes[0]
# wrong argument count
assert_raises_rpc_error(-1, 'help', node.help, 'foo', 'bar')
# invalid argument
assert_raises_rpc_error(-1, 'JSON value is not a string as expected', node.help, 0)
# help of unknown command
assert_equal(node.help('foo'), 'help: unknown command: foo')
# command titles
titles = [line[3:-3] for line in node.help().splitlines() if line.startswith('==')]
components = ['Blockchain', 'Control', 'Generating', 'Mining', 'Network', 'Rawtransactions', 'Util']
if self.is_wallet_compiled():
components.append('Wallet')
if self.is_external_signer_compiled():
components.append('Signer')
if self.is_zmq_compiled():
components.append('Zmq')
assert_equal(titles, sorted(components))
def dump_help(self):
dump_dir = os.path.join(self.options.tmpdir, 'rpc_help_dump')
os.mkdir(dump_dir)
calls = [line.split(' ', 1)[0] for line in self.nodes[0].help().splitlines() if line and not line.startswith('==')]
for call in calls:
with open(os.path.join(dump_dir, call), 'w', encoding='utf-8') as f:
# Make sure the node can generate the help at runtime without crashing
f.write(self.nodes[0].help(call))
def wallet_help(self):
assert 'getnewaddress ( "label" "address_type" )' in self.nodes[0].help('getnewaddress')
self.restart_node(0, extra_args=['-nowallet=1'])
assert 'getnewaddress ( "label" "address_type" )' in self.nodes[0].help('getnewaddress')
if __name__ == '__main__':
HelpRpcTest().main() | null |
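A quick editor-added illustration of the conversion-table regex that process_mapping above relies on; the sample line mimics an entry from src/rpc/client.cpp.

import re

line = '    { "setban", 2, "bantime" },'
m = re.search(r'{ *("[^"]*"), *([0-9]+) *, *("[^"]*") *},', line)
assert m is not None
# Strip the surrounding quotes the same way parse_string does.
name, idx, argname = m.group(1)[1:-1], int(m.group(2)), m.group(3)[1:-1]
print(name, idx, argname)  # setban 2 bantime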
872 | """Utils for dynamically importing stuff."""
# Copyright (C) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
import importlib
import inspect
import json
import os
# TODO: To avoid error during importing yapf dynamically. After the bug is fixed, code should be removed.
try:
import yapf # noqa: F401
except ImportError:
pass
# pylint: disable=protected-access
SUPPORTED_BACKBONE_BACKENDS = {
"otx": "otx.algorithms.common.adapters.mmcv.models",
"mmcls": "mmcls.models",
"mmdet": "mmdet.models",
"mmseg": "mmseg.models",
"torchvision": "otx.algorithms.common.adapters.mmcv.models",
"pytorchcv": "mmdet.models",
"omz.mmcls": "otx.algorithms.classification.adapters.mmcls.models.backbones.mmov_backbone",
}
def get_impl_class(impl_path):
"""Returns a class by its path in package."""
task_impl_module_name, task_impl_class_name = impl_path.rsplit(".", 1)
task_impl_module = importlib.import_module(task_impl_module_name)
task_impl_class = getattr(task_impl_module, task_impl_class_name)
return task_impl_class
def get_backbone_list(backend):
"""Gather available backbone list from json file & imported lib."""
available_backbone_path = os.path.join(get_otx_root_path(), f"cli/builder/supported_backbone/{backend}.json")
available_backbones = {}
if os.path.exists(available_backbone_path):
with open(available_backbone_path, "r", encoding="UTF-8") as f:
available_backbones = json.load(f)
available_backbones = available_backbones["backbones"]
elif backend == "pytorchcv" and importlib.util.find_spec(backend):
backbone_list = importlib.import_module(f"{backend}.model_provider")._models
backbone_format = {"required": [], "options": {}, "available": []}
for backbone in backbone_list:
backbone_type = f"mmdet.{backbone}"
available_backbones[backbone_type] = backbone_format
else:
raise ValueError(f"{backend} cannot be imported or supported.")
return available_backbones
def get_backbone_registry(backend=None):
"""Gather backbone list from backends."""
if backend not in SUPPORTED_BACKBONE_BACKENDS:
raise ValueError(f"{backend} is an unsupported backbone backend.")
custom_imports = []
backend_import_path = SUPPORTED_BACKBONE_BACKENDS[backend]
mm_backbones = importlib.import_module(backend_import_path)
mm_registry = mm_backbones.BACKBONES
custom_imports.append(backend_import_path)
return mm_registry, custom_imports
def get_module_args(module):
"""Gather module's Required Args."""
if module is None:
return []
required_args = []
default_args = {}
args_signature = inspect.signature(module)
for arg_key, arg_value in args_signature.parameters.items():
if arg_value.default is inspect.Parameter.empty:
required_args.append(arg_key)
continue
default_args[arg_key] = arg_value.default
# Get args from parents
parent_module = module.__bases__
while len(parent_module):
parent_args_signature = inspect.signature(parent_module[0])
for arg_key, arg_value in parent_args_signature.parameters.items():
if arg_key == "depth" and "arch" in required_args:
continue
if arg_value.default is inspect.Parameter.empty and arg_key not in required_args:
required_args.append(arg_key)
continue
parent_module = parent_module[0].__bases__
required_args = [arg for arg in required_args if arg not in ("args", "kwargs", "self")]
return required_args, default_args
def get_otx_root_path():
"""Get otx root path from importing otx."""
otx_module = importlib.import_module("otx")
if otx_module:
return os.path.dirname(inspect.getfile(otx_module))
return None | null |
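A small sketch of get_impl_class from the module above, resolving a stdlib class so it runs anywhere; the dotted path is purely illustrative.

cls = get_impl_class("collections.OrderedDict")
instance = cls(a=1)
print(type(instance).__name__)  # OrderedDict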
873 | # Copyright (c) ZenML GmbH 2022. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Image build context."""
import os
from pathlib import Path
from typing import IO, Dict, List, Optional, Set, Tuple, cast
from zenml.constants import REPOSITORY_DIRECTORY_NAME
from zenml.io import fileio
from zenml.logger import get_logger
from zenml.utils import io_utils, string_utils
logger = get_logger(__name__)
class BuildContext:
"""Image build context.
This class is responsible for creating an archive of the files needed to
build a container image.
"""
def __init__(
self,
root: Optional[str] = None,
        dockerignore_file: Optional[str] = None,
) -> None:
"""Initializes a build context.
Args:
root: Optional root directory for the build context.
dockerignore_file: Optional path to a dockerignore file. If not
given, a file called `.dockerignore` in the build context root
directory will be used instead if it exists.
"""
self._root = root
        self._dockerignore_file = dockerignore_file
self._extra_files: Dict[str, str] = {}
@property
    def dockerignore_file(self) -> Optional[str]:
"""The dockerignore file to use.
Returns:
Path to the dockerignore file to use.
"""
if self._dockerignore_file:
return self._dockerignore_file
if self._root:
default_dockerignore_path = os.path.join(
self._root, ".dockerignore"
)
if fileio.exists(default_dockerignore_path):
return default_dockerignore_path
return None
def add_file(self, source: str, destination: str) -> None:
"""Adds a file to the build context.
Args:
source: The source of the file to add. This can either be a path
or the file content.
destination: The path inside the build context where the file
should be added.
"""
if fileio.exists(source):
with fileio.open(source) as f:
self._extra_files[destination] = f.read()
else:
self._extra_files[destination] = source
def add_directory(self, source: str, destination: str) -> None:
"""Adds a directory to the build context.
Args:
source: Path to the directory.
destination: The path inside the build context where the directory
should be added.
Raises:
ValueError: If `source` does not point to a directory.
"""
if not fileio.isdir(source):
raise ValueError(
f"Can't add directory {source} to the build context as it "
"does not exist or is not a directory."
)
for dir, _, files in fileio.walk(source):
dir_path = Path(fileio.convert_to_str(dir))
for file_name in files:
file_name = fileio.convert_to_str(file_name)
file_source = dir_path / file_name
file_destination = (
Path(destination)
/ dir_path.relative_to(source)
/ file_name
)
with file_source.open("r") as f:
self._extra_files[file_destination.as_posix()] = f.read()
def write_archive(self, output_file: IO[bytes], gzip: bool = True) -> None:
"""Writes an archive of the build context to the given file.
Args:
output_file: The file to write the archive to.
gzip: Whether to use `gzip` to compress the file.
"""
from docker.utils import build as docker_build_utils
files = self._get_files()
extra_files = self._get_extra_files()
context_archive = docker_build_utils.create_archive(
fileobj=output_file,
root=self._root,
files=sorted(files),
gzip=gzip,
extra_files=extra_files,
)
build_context_size = os.path.getsize(context_archive.name)
if (
self._root
and build_context_size > 50 * 1024 * 1024
            and not self.dockerignore_file
):
# The build context exceeds 50MiB and we didn't find any excludes
# in dockerignore files -> remind to specify a .dockerignore file
logger.warning(
"Build context size for docker image: `%s`. If you believe this is "
"unreasonably large, make sure to include a `.dockerignore` file "
"at the root of your build context `%s` or specify a custom file "
"in the Docker configuration when defining your pipeline.",
string_utils.get_human_readable_filesize(build_context_size),
os.path.join(self._root, ".dockerignore"),
)
def _get_files(self) -> Set[str]:
"""Gets all non-ignored files in the build context root directory.
Returns:
All build context files.
"""
if self._root:
exclude_patterns = self._get_exclude_patterns()
from docker.utils import build as docker_build_utils
return cast(
Set[str],
docker_build_utils.exclude_paths(
self._root, patterns=exclude_patterns
),
)
else:
return set()
def _get_extra_files(self) -> List[Tuple[str, str]]:
"""Gets all extra files of the build context.
Returns:
A tuple (path, file_content) for all extra files in the build
context.
"""
return list(self._extra_files.items())
def _get_exclude_patterns(self) -> List[str]:
"""Gets all exclude patterns from the dockerignore file.
Returns:
The exclude patterns from the dockerignore file.
"""
        dockerignore = self.dockerignore_file
if dockerignore:
patterns = self._parse_dockerignore(dockerignore)
# Always include the .zen directory
patterns.append(f"!/{REPOSITORY_DIRECTORY_NAME}")
return patterns
else:
logger.info(
"No `.dockerignore` found, including all files inside build "
"context.",
)
return []
@staticmethod
def _parse_dockerignore(dockerignore_path: str) -> List[str]:
"""Parses a dockerignore file and returns a list of patterns to ignore.
Args:
dockerignore_path: Path to the dockerignore file.
Returns:
List of patterns to ignore.
"""
try:
file_content = io_utils.read_file_contents_as_string(
dockerignore_path
)
except FileNotFoundError:
logger.warning(
"Unable to find dockerignore file at path '%s'.",
dockerignore_path,
)
return []
exclude_patterns = []
for line in file_content.split("\n"):
line = line.strip()
if line and not line.startswith("#"):
exclude_patterns.append(line)
return exclude_patterns | null |
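A hedged sketch of driving BuildContext above; it assumes a ZenML environment where zenml.io.fileio and the docker package are importable, and the file content and paths are placeholders.

import tempfile

context = BuildContext(root=".")                # build context rooted at the CWD
context.add_file("print('hello')", "hello.py")  # inline content as an extra file

with tempfile.NamedTemporaryFile(suffix=".tar.gz") as f:
    context.write_archive(f, gzip=True)         # writes a tar.gz of the context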
874 | ## @file
# This is an XML API that uses a syntax similar to XPath, but it is written in
# standard python so that no extra python packages are required to use it.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
XmlRoutines
'''
##
# Import Modules
#
import xml.dom.minidom
import re
import codecs
from Logger.ToolError import PARSER_ERROR
import Logger.Log as Logger
## Create a element of XML
#
# @param Name
# @param String
# @param NodeList
# @param AttributeList
#
def CreateXmlElement(Name, String, NodeList, AttributeList):
Doc = xml.dom.minidom.Document()
Element = Doc.createElement(Name)
if String != '' and String is not None:
Element.appendChild(Doc.createTextNode(String))
for Item in NodeList:
if isinstance(Item, type([])):
Key = Item[0]
Value = Item[1]
if Key != '' and Key is not None and Value != '' and Value is not None:
Node = Doc.createElement(Key)
Node.appendChild(Doc.createTextNode(Value))
Element.appendChild(Node)
else:
Element.appendChild(Item)
for Item in AttributeList:
Key = Item[0]
Value = Item[1]
if Key != '' and Key is not None and Value != '' and Value is not None:
Element.setAttribute(Key, Value)
return Element
## Get a list of XML nodes using XPath style syntax.
#
# Return a list of XML DOM nodes from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom The root XML DOM node.
# @param String A XPath style path.
#
def XmlList(Dom, String):
if String is None or String == "" or Dom is None or Dom == "":
return []
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
if String[0] == "/":
String = String[1:]
TagList = String.split('/')
Nodes = [Dom]
Index = 0
End = len(TagList) - 1
while Index <= End:
ChildNodes = []
for Node in Nodes:
if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == \
TagList[Index]:
if Index < End:
ChildNodes.extend(Node.childNodes)
else:
ChildNodes.append(Node)
Nodes = ChildNodes
ChildNodes = []
Index += 1
return Nodes
## Get a single XML node using XPath style syntax.
#
# Return a single XML DOM node from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM node.
# @param String A XPath style path.
#
def XmlNode(Dom, String):
if String is None or String == "" or Dom is None or Dom == "":
return None
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
if String[0] == "/":
String = String[1:]
TagList = String.split('/')
Index = 0
End = len(TagList) - 1
ChildNodes = [Dom]
while Index <= End:
for Node in ChildNodes:
if Node.nodeType == Node.ELEMENT_NODE and \
Node.tagName == TagList[Index]:
if Index < End:
ChildNodes = Node.childNodes
else:
return Node
break
Index += 1
return None
## Get a single XML element using XPath style syntax.
#
# Return a single XML element from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
def XmlElement(Dom, String):
try:
return XmlNode(Dom, String).firstChild.data.strip()
except BaseException:
return ""
## Get a single XML element using XPath style syntax.
#
# Similar with XmlElement, but do not strip all the leading and tailing space
# and newline, instead just remove the newline and spaces introduced by
# toprettyxml()
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
def XmlElement2(Dom, String):
try:
HelpStr = XmlNode(Dom, String).firstChild.data
gRemovePrettyRe = re.compile(r"""(?:(\n *) )(.*)\1""", re.DOTALL)
HelpStr = re.sub(gRemovePrettyRe, r"\2", HelpStr)
return HelpStr
except BaseException:
return ""
## Get a single XML element of the current node.
#
# Return a single XML element specified by the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
def XmlElementData(Dom):
try:
return Dom.firstChild.data.strip()
except BaseException:
return ""
## Get a list of XML elements using XPath style syntax.
#
# Return a list of XML elements from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
def XmlElementList(Dom, String):
return list(map(XmlElementData, XmlList(Dom, String)))
## Get the XML attribute of the current node.
#
# Return a single XML attribute named Attribute from the current root Dom.
# If the input Dom or Attribute is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
# @param Attribute The name of Attribute.
#
def XmlAttribute(Dom, Attribute):
try:
return Dom.getAttribute(Attribute)
except BaseException:
return ''
## Get the XML node name of the current node.
#
# Return a single XML node name from the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
def XmlNodeName(Dom):
try:
return Dom.nodeName.strip()
except BaseException:
return ''
## Parse an XML file.
#
# Parse the input XML file named FileName and return a XML DOM it stands for.
# If the input File is not a valid XML file, then an empty string is returned.
#
# @param FileName The XML file name.
#
def XmlParseFile(FileName):
try:
XmlFile = codecs.open(FileName, 'rb')
Dom = xml.dom.minidom.parse(XmlFile)
XmlFile.close()
return Dom
except BaseException as XExcept:
XmlFile.close()
Logger.Error('\nUPT', PARSER_ERROR, XExcept, File=FileName, RaiseError=True) | null |
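A short editor-added illustration of the XPath-style helpers above, run against an in-memory document:

import xml.dom.minidom

dom = xml.dom.minidom.parseString(
    "<Root><Name>Demo</Name><Item>a</Item><Item>b</Item></Root>"
)
print(XmlElement(dom, "/Root/Name"))      # Demo
print(XmlElementList(dom, "/Root/Item"))  # ['a', 'b']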
875 | """Add Label Schema Table
Revision ID: cf7e13f71c9d
Revises: 6f3b45681519
Create Date: 2022-04-04 11:09:56.559955
"""
from alembic import op
import sqlalchemy as sa
import datetime
from sqlalchemy import orm
from shared.database.labels.label_schema import LabelSchema
from shared.database.user import User
from shared.database.auth.member import Member
from shared.database.project import Project, UserbaseProject
from shared.database.attribute.attribute_template_group import Attribute_Template_Group
from shared.database.annotation.instance_template import InstanceTemplate
from shared.database.source_control.working_dir import WorkingDir, WorkingDirFileLink
from shared.database.source_control.file import File
from sqlalchemy.ext.declarative import declarative_base
from shared.database.task.job.job import Job
# revision identifiers, used by Alembic.
revision = 'cf7e13f71c9d'
down_revision = '6f3b45681519'
branch_labels = None
depends_on = None
Base = declarative_base()
def add_schemas_to_projects(op):
bind = op.get_bind()
session = orm.Session(bind = bind)
all_projects = session.query(Project).all()
if len(all_projects) == 0:
return
super_admin = session.query(User).filter(
User.is_super_admin == True
).first()
if not super_admin:
raise Exception(
            'Cannot migrate data, need at least one super admin user. Please set a user with super_admin flag enabled')
member = session.query(Member).filter(
Member.user_id == super_admin.id
).first()
print('Creating Schemas for all projects')
i = 0
for project in all_projects:
new_schema = LabelSchema.new(
session = session,
name = 'Default Schema',
project_id = project.id,
member_created_id = member.id
)
directory = project.directory_default
if directory is None:
label_file_list = session.query(File).filter(
File.project_id == project.id, File.type == 'label').all()
else:
working_dir_sub_query = session.query(WorkingDirFileLink).filter(
WorkingDirFileLink.working_dir_id == directory.id,
WorkingDirFileLink.type == "label").subquery('working_dir_sub_query')
label_file_list = session.query(File).filter(
File.id == working_dir_sub_query.c.file_id).all()
for label_file in label_file_list:
rel = new_schema.add_label_file(session, label_file.id, member.id)
print(f' --> Added Label: {label_file.label.name} to Schema')
attribute_groups_list = session.query(Attribute_Template_Group).filter(
Attribute_Template_Group.project_id == project.id
).all()
for attr_grp in attribute_groups_list:
rel = new_schema.add_attribute_group(session, attr_grp.id, member.id)
print(f' --> Added Attribute Group: [{attr_grp.name} - {attr_grp.prompt}] to Schema')
instance_template_list = session.query(InstanceTemplate).filter(
InstanceTemplate.project_id == project.id
).all()
for template in instance_template_list:
rel = new_schema.add_instance_template(session, template.id, member.id)
            print(f' --> Added Instance Template: {template.name} to Schema')
print(f'Num Projects [{i}/{len(all_projects) - 1}]')
i += 1
job_list = session.query(Job).filter(
Job.project_id == project.id
)
for job in job_list:
job.label_schema_id = new_schema.id
session.add(job)
def upgrade():
op.create_table('label_schema',
sa.Column('id', sa.Integer(), nullable = False),
sa.Column('name', sa.String(), nullable = False),
sa.Column('project_id', sa.Integer(), sa.ForeignKey('project.id')),
sa.Column('archived', sa.Boolean(), default = False),
sa.Column('member_created_id', sa.Integer(), sa.ForeignKey('member.id')),
sa.Column('member_updated_id', sa.Integer(), sa.ForeignKey('member.id')),
sa.Column('time_created', sa.DateTime, default = datetime.datetime.utcnow),
sa.Column('time_updated', sa.DateTime, onupdate = datetime.datetime.utcnow),
sa.PrimaryKeyConstraint('id')
)
op.create_index('index_label_schema_project_id', 'label_schema', ['project_id'])
op.create_table('label_schema_link',
sa.Column('id', sa.Integer(), nullable = False),
sa.Column('schema_id', sa.Integer(), sa.ForeignKey('label_schema.id')),
sa.Column('label_file_id', sa.Integer(), sa.ForeignKey('file.id')),
sa.Column('instance_template_id', sa.Integer(), sa.ForeignKey('instance_template.id')),
sa.Column('attribute_template_group_id', sa.Integer(),
sa.ForeignKey('attribute_template_group.id')),
sa.Column('member_created_id', sa.Integer(), sa.ForeignKey('member.id')),
sa.Column('member_updated_id', sa.Integer(), sa.ForeignKey('member.id')),
sa.Column('time_created', sa.DateTime, default = datetime.datetime.utcnow),
sa.Column('time_updated', sa.DateTime, onupdate = datetime.datetime.utcnow),
sa.PrimaryKeyConstraint('id')
)
op.create_index('index_label_schema_link_schema_id', 'label_schema_link', ['schema_id'])
op.create_index('index_label_schema_link_label_file_id', 'label_schema_link', ['label_file_id'])
# op.create_index('index_attribute_template_project_id', 'attribute_template', ['project_id'])
op.add_column('job', sa.Column('label_schema_id', sa.Integer, sa.ForeignKey('label_schema.id')))
add_schemas_to_projects(op)
def downgrade():
op.drop_column('job', 'label_schema_id')
op.drop_index('index_label_schema_project_id', 'label_schema')
op.drop_index('index_label_schema_link_schema_id', 'label_schema_link')
op.drop_index('index_label_schema_link_label_file_id', 'label_schema_link')
    # 'index_attribute_template_project_id' was never created in upgrade() (the
    # create_index call above is commented out), so skip dropping it on downgrade.
    # op.drop_index('index_attribute_template_project_id', 'attribute_template')
op.drop_table('label_schema_link')
op.drop_table('label_schema') | null |
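For reference, a hedged sketch of how this revision would be applied and rolled back with the usual Alembic CLI:

# Apply this revision (and any earlier ones):
#   alembic upgrade cf7e13f71c9d
# Revert to the previous revision:
#   alembic downgrade 6f3b45681519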
876 | # coding: utf-8
"""
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
"""
import unittest
from unittest.mock import patch
import urllib3
import typing_extensions
import unit_test_api
from unit_test_api.paths.request_body_post_integer_type_matches_integers_request_body.post import operation as post # noqa: E501
from unit_test_api import schemas, api_client
from unit_test_api.configurations import api_configuration, schema_configuration
from .. import ApiTestMixin
class TestPost(ApiTestMixin, unittest.TestCase):
"""
Post unit test stubs
"""
api_config = api_configuration.ApiConfiguration()
schema_config = schema_configuration.SchemaConfiguration()
used_api_client = api_client.ApiClient(configuration=api_config, schema_configuration=schema_config)
api = post.ApiForPost(api_client=used_api_client) # noqa: E501
response_status = 200
response_body = ''
def test_an_object_is_not_an_integer_fails(self):
content_type = 'application/json'
# an object is not an integer
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
{
}
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
def test_a_string_is_not_an_integer_fails(self):
content_type = 'application/json'
# a string is not an integer
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
"foo"
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
def test_null_is_not_an_integer_fails(self):
content_type = 'application/json'
# null is not an integer
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
None
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
    def test_a_float_with_zero_fractional_part_is_an_integer_passes(self):
content_type = 'application/json'
# a float with zero fractional part is an integer
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
1.0
)
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
mock_request.return_value = self.response(
self.json_bytes(self.response_body),
status=self.response_status
)
api_response = self.api.post(
body=body,
)
self.assert_pool_manager_request_called_with(
mock_request,
self.api_config.get_server_url('servers', None) + "/requestBody/postIntegerTypeMatchesIntegersRequestBody",
method='post'.upper(),
body=self.json_bytes(payload),
content_type=content_type,
)
assert isinstance(api_response.response, urllib3.HTTPResponse)
assert isinstance(api_response.body, schemas.Unset)
def test_a_float_is_not_an_integer_fails(self):
content_type = 'application/json'
# a float is not an integer
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
1.1
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
def test_a_boolean_is_not_an_integer_fails(self):
content_type = 'application/json'
# a boolean is not an integer
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
True
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
def test_an_integer_is_an_integer_passes(self):
content_type = 'application/json'
# an integer is an integer
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
1
)
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
mock_request.return_value = self.response(
self.json_bytes(self.response_body),
status=self.response_status
)
api_response = self.api.post(
body=body,
)
self.assert_pool_manager_request_called_with(
mock_request,
self.api_config.get_server_url('servers', None) + "/requestBody/postIntegerTypeMatchesIntegersRequestBody",
method='post'.upper(),
body=self.json_bytes(payload),
content_type=content_type,
)
assert isinstance(api_response.response, urllib3.HTTPResponse)
assert isinstance(api_response.body, schemas.Unset)
def test_a_string_is_still_not_an_integer_even_if_it_looks_like_one_fails(self):
content_type = 'application/json'
# a string is still not an integer, even if it looks like one
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
"1"
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
def test_an_array_is_not_an_integer_fails(self):
content_type = 'application/json'
# an array is not an integer
with patch.object(urllib3.PoolManager, 'request') as mock_request:
payload = (
[
]
)
with self.assertRaises((unit_test_api.ApiValueError, unit_test_api.ApiTypeError)):
body = post.request_body.RequestBody.content["application/json"].schema.validate(
payload,
configuration=self.schema_config
)
self.api.post(body=body)
if __name__ == '__main__':
unittest.main() | null |
877 | """
CMMLU: Measuring massive multitask language understanding in Chinese
https://arxiv.org/abs/2306.09212
CMMLU is a comprehensive evaluation benchmark specifically designed to evaluate the knowledge and reasoning abilities of LLMs within the context of Chinese language and culture.
CMMLU covers a wide range of subjects, comprising 67 topics that span from elementary to advanced professional levels.
Homepage: https://github.com/haonan-li/CMMLU
"""
from lm_eval.base import MultipleChoiceTask
_CITATION = """
@misc{li2023cmmlu,
title={CMMLU: Measuring massive multitask language understanding in Chinese},
author={Haonan Li and Yixuan Zhang and Fajri Koto and Yifei Yang and Hai Zhao and Yeyun Gong and Nan Duan and Timothy Baldwin},
year={2023},
eprint={2306.09212},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
"""
SUBJECTS = {
"agronomy": "农学",
"anatomy": "解剖学",
"ancient_chinese": "古汉语",
"arts": "艺术学",
"astronomy": "天文学",
"business_ethics": "商业伦理",
"chinese_civil_service_exam": "中国公务员考试",
"chinese_driving_rule": "中国驾驶规则",
"chinese_food_culture": "中国饮食文化",
"chinese_foreign_policy": "中国外交政策",
"chinese_history":"中国历史",
"chinese_literature": "中国文学",
"chinese_teacher_qualification": "中国教师资格",
"clinical_knowledge": "临床知识",
"college_actuarial_science":"大学精算学",
"college_education":"大学教育学",
"college_engineering_hydrology": "大学工程水文学",
"college_law": "大学法律",
"college_mathematics": "大学数学",
"college_medical_statistics":"大学医学统计",
"college_medicine": "大学医学",
"computer_science": "计算机科学",
"computer_security": "计算机安全",
"conceptual_physics": "概念物理学",
"construction_project_management": "建设工程管理",
"economics": "经济学",
"education": "教育学",
"electrical_engineering": "电气工程",
"elementary_chinese":"小学语文",
"elementary_commonsense":"小学常识",
"elementary_information_and_technology": "小学信息技术",
"elementary_mathematics": "初等数学",
"ethnology": "民族学",
"food_science": "食品科学",
"genetics": "遗传学",
"global_facts": "全球事实",
"high_school_biology": "高中生物",
"high_school_chemistry": "高中化学",
"high_school_geography": "高中地理",
"high_school_mathematics": "高中数学",
"high_school_physics": "高中物理学",
"high_school_politics": "高中政治",
"human_sexuality": "人类性行为",
"international_law": "国际法学",
"journalism": "新闻学",
"jurisprudence": "法理学",
"legal_and_moral_basis": "法律与道德基础",
"logical": "逻辑学",
"machine_learning": "机器学习",
"management": "管理学",
"marketing": "市场营销",
"marxist_theory": "马克思主义理论",
"modern_chinese": "现代汉语",
"nutrition": "营养学",
"philosophy": "哲学",
"professional_accounting": "专业会计",
"professional_law": "专业法学",
"professional_medicine": "专业医学",
"professional_psychology": "专业心理学",
"public_relations": "公共关系",
"security_study":"安全研究",
"sociology": "社会学",
"sports_science": "体育学",
"traditional_chinese_medicine": "中医中药",
"virology": "病毒学",
"world_history":"世界历史",
"world_religions": "世界宗教",
}
def create_all_tasks():
"""Creates a dictionary of tasks from a list of subjects
:return: {task_name: task}
e.g. {cmmlu-world_history: Task, cmmlu-virology: Task}
"""
return {f"cmmlu-{sub}": create_task(sub) for sub in SUBJECTS.keys()}
def create_task(subject):
class Cmmlu(CmmluSubject):
def __init__(self):
super().__init__(subject)
return Cmmlu
class CmmluSubject(MultipleChoiceTask):
VERSION = 1
DATASET_PATH = "haonan-li/cmmlu"
DATASET_NAME = None
def __init__(self, subject):
self.DATASET_NAME = subject
super().__init__()
def has_training_docs(self):
return False
def has_validation_docs(self):
return True
def has_test_docs(self):
return True
def validation_docs(self):
if self.has_validation_docs():
return map(self._process_doc,self.dataset["dev"])
    def test_docs(self):
if self.has_test_docs():
return map(self._process_doc,self.dataset["test"])
def _format_subject(self, subject):
words = subject.split("_")
return " ".join(words)
def fewshot_context(self, doc, num_fewshot, **kwargs):
subject = self.DATASET_NAME
description= f"以下是关于{SUBJECTS[subject]}的单项选择题,请直接给出正确答案的选项。"
kwargs["description"] = description
return super().fewshot_context(doc=doc, num_fewshot=num_fewshot, **kwargs)
def _process_doc(self, doc):
def format_example(doc, keys):
"""
<prompt>
A. <choice1>
B. <choice2>
C. <choice3>
D. <choice4>
答案:
"""
question = doc["Question"].strip()
choices = "".join(
[f'{key}. {doc[key]}\n' for key in keys]
)
prompt = f"{question}\n{choices}答案:"
return prompt
keys = ["A", "B", "C", "D"]
return {
"query": format_example(doc, keys),
"choices": keys,
"gold": ord(doc["Answer"])-ord("A"),
}
def fewshot_examples(self, k, rnd):
if self._fewshot_docs is None:
self._fewshot_docs = list(map(self._process_doc, self.dataset["dev"]))
# use the unchanged order of the dev set without sampling,
return self._fewshot_docs[:k]
def doc_to_text(self, doc):
return doc["query"]
def should_decontaminate(self):
return True
def doc_to_decontamination_query(self, doc):
return doc["query"] | null |
878 | # -*- coding: utf-8 -*-
"""Handle app url related tests.
Copyright (C) 2021 Gitcoin Core
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from secrets import token_hex
from django.urls import resolve, reverse
from test_plus.test import TestCase
class AppUrlsTestCase(TestCase):
"""Define tests for app urls."""
def setUp(self):
self.user = self.make_user()
def test_robotstxt_reverse(self):
"""Test the robotstxt url and check the reverse."""
self.assertEqual(reverse('robotstxt'), '/robots.txt')
def test_robotstxt_resolve(self):
"""Test the robotstxt url and check the resolution."""
self.assertEqual(resolve('/robots.txt').view_name, 'robotstxt')
self.assertEqual(resolve('/robots.txt/').view_name, 'robotstxt')
def test_sitemap_reverse(self):
"""Test the sitemap url and check the reverse."""
self.assertEqual(reverse('django.contrib.sitemaps.views.index'), '/sitemap.xml')
def test_sitemap_resolve(self):
"""Test the sitemap url and check the resolution."""
self.assertEqual(resolve('/sitemap.xml').view_name, 'django.contrib.sitemaps.views.index')
    def test_email_settings_reverse(self):
"""Test the email_settings url and check the reverse."""
priv_key = token_hex(16)[:29]
self.assertEqual(reverse('email_settings', args=(priv_key, )), f'/settings/email/{priv_key}')
def test_email_settings_resolve(self):
"""Test the email_settings url and check the resolution."""
self.assertEqual(resolve('/settings/email/').view_name, 'email_settings')
def test_leaderboard_reverse(self):
"""Test the leaderboard url and check the reverse."""
self.assertEqual(reverse('leaderboard', args=('quarterly_earners', )), '/leaderboard/quarterly_earners')
def test_leaderboard_resolve(self):
"""Test the leaderboard url and check the resolution."""
self.assertEqual(resolve('/leaderboard/').view_name, 'leaderboard')
def test__leaderboard_reverse(self):
"""Test the _leaderboard url and check the reverse."""
self.assertEqual(reverse('_leaderboard'), '/leaderboard')
def test__leaderboard_resolve(self):
"""Test the _leaderboard url and check the resolution."""
self.assertEqual(resolve('/leaderboard').view_name, '_leaderboard')
def test_stats_reverse(self):
"""Test the stats url and check the reverse."""
self.assertEqual(reverse('stats'), '/_administration/stats/')
def test_stats_resolve(self):
"""Test the stats url and check the resolution."""
self.assertEqual(resolve('/_administration/stats/').view_name, 'stats')
def test_explorer_reverse(self):
"""Test the explorer url and check the reverse."""
self.assertEqual(reverse('explorer'), '/explorer')
def test_explorer_resolve(self):
"""Test the explorer url and check the resolution."""
self.assertEqual(resolve('/explorer').view_name, 'explorer')
self.assertEqual(resolve('/explorer/').view_name, 'explorer')
def test_new_bounty_reverse(self):
"""Test the new_bounty url and check the reverse."""
self.assertEqual(reverse('new_bounty'), '/bounty/new')
def test_new_bounty_resolve(self):
"""Test the new_bounty url and check the resolution."""
self.assertEqual(resolve('/bounty/new').view_name, 'new_bounty')
self.assertEqual(resolve('/bounty/new/').view_name, 'new_bounty')
    def test_uninterested_reverse(self):
"""Test the uninterested url and check the reverse"""
self.assertEqual(reverse('uninterested', args=[1, 2]), '/actions/bounty/1/interest/2/uninterested/')
    def test_uninterested_resolve(self):
"""Test the uninterested url and check the resolution"""
self.assertEqual(resolve('/actions/bounty/1/interest/2/uninterested/').view_name, 'uninterested') | null |
879 | # coding: utf-8
"""
PKS
PKS API # noqa: E501
OpenAPI spec version: 1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class UsageTotals(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'cpu': 'int',
'memory': 'float',
'cluster': 'int'
}
attribute_map = {
'cpu': 'cpu',
'memory': 'memory',
'cluster': 'cluster'
}
def __init__(self, cpu=None, memory=None, cluster=None): # noqa: E501
"""UsageTotals - a model defined in Swagger""" # noqa: E501
self._cpu = None
self._memory = None
self._cluster = None
self.discriminator = None
self.cpu = cpu
self.memory = memory
self.cluster = cluster
@property
def cpu(self):
"""Gets the cpu of this UsageTotals. # noqa: E501
:return: The cpu of this UsageTotals. # noqa: E501
:rtype: int
"""
return self._cpu
@cpu.setter
def cpu(self, cpu):
"""Sets the cpu of this UsageTotals.
:param cpu: The cpu of this UsageTotals. # noqa: E501
:type: int
"""
if cpu is None:
raise ValueError("Invalid value for `cpu`, must not be `None`") # noqa: E501
self._cpu = cpu
@property
def memory(self):
"""Gets the memory of this UsageTotals. # noqa: E501
:return: The memory of this UsageTotals. # noqa: E501
:rtype: float
"""
return self._memory
@memory.setter
def memory(self, memory):
"""Sets the memory of this UsageTotals.
:param memory: The memory of this UsageTotals. # noqa: E501
:type: float
"""
if memory is None:
raise ValueError("Invalid value for `memory`, must not be `None`") # noqa: E501
self._memory = memory
@property
def cluster(self):
"""Gets the cluster of this UsageTotals. # noqa: E501
:return: The cluster of this UsageTotals. # noqa: E501
:rtype: int
"""
return self._cluster
@cluster.setter
def cluster(self, cluster):
"""Sets the cluster of this UsageTotals.
:param cluster: The cluster of this UsageTotals. # noqa: E501
:type: int
"""
if cluster is None:
raise ValueError("Invalid value for `cluster`, must not be `None`") # noqa: E501
self._cluster = cluster
    def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(UsageTotals, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UsageTotals):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other | null |
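A small editor-added sketch exercising the generated model above:

totals = UsageTotals(cpu=8, memory=16.0, cluster=2)
print(totals.to_dict())  # {'cpu': 8, 'memory': 16.0, 'cluster': 2}
print(totals == UsageTotals(cpu=8, memory=16.0, cluster=2))  # True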
880 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkadcp.endpoint import endpoint_data
import json
class UpdateHubClusterFeatureRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'adcp', '2022-01-01', 'UpdateHubClusterFeature','adcp')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AccessControlList(self): # Array
return self.get_query_params().get('AccessControlList')
def set_AccessControlList(self, AccessControlList): # Array
self.add_query_param("AccessControlList", json.dumps(AccessControlList))
def get_MonitorEnabled(self): # Boolean
return self.get_query_params().get('MonitorEnabled')
def set_MonitorEnabled(self, MonitorEnabled): # Boolean
self.add_query_param('MonitorEnabled', MonitorEnabled)
def get_DeletionProtection(self): # Boolean
return self.get_query_params().get('DeletionProtection')
def set_DeletionProtection(self, DeletionProtection): # Boolean
self.add_query_param('DeletionProtection', DeletionProtection)
def get_EnableMesh(self): # Boolean
return self.get_query_params().get('EnableMesh')
def set_EnableMesh(self, EnableMesh): # Boolean
self.add_query_param('EnableMesh', EnableMesh)
def get_ArgoCDHAEnabled(self): # Boolean
return self.get_query_params().get('ArgoCDHAEnabled')
def set_ArgoCDHAEnabled(self, ArgoCDHAEnabled): # Boolean
self.add_query_param('ArgoCDHAEnabled', ArgoCDHAEnabled)
def get_ArgoCDEnabled(self): # Boolean
return self.get_query_params().get('ArgoCDEnabled')
def set_ArgoCDEnabled(self, ArgoCDEnabled): # Boolean
self.add_query_param('ArgoCDEnabled', ArgoCDEnabled)
def get_VSwitches(self): # Array
return self.get_query_params().get('VSwitches')
def set_VSwitches(self, VSwitches): # Array
self.add_query_param("VSwitches", json.dumps(VSwitches))
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_PublicAccessEnabled(self): # Boolean
return self.get_query_params().get('PublicAccessEnabled')
def set_PublicAccessEnabled(self, PublicAccessEnabled): # Boolean
self.add_query_param('PublicAccessEnabled', PublicAccessEnabled)
def get_PublicApiServerEnabled(self): # Boolean
return self.get_query_params().get('PublicApiServerEnabled')
def set_PublicApiServerEnabled(self, PublicApiServerEnabled): # Boolean
self.add_query_param('PublicApiServerEnabled', PublicApiServerEnabled)
def get_ArgoServerEnabled(self): # Boolean
return self.get_query_params().get('ArgoServerEnabled')
def set_ArgoServerEnabled(self, ArgoServerEnabled): # Boolean
self.add_query_param('ArgoServerEnabled', ArgoServerEnabled)
def get_WorkflowScheduleMode(self): # String
return self.get_query_params().get('WorkflowScheduleMode')
def set_WorkflowScheduleMode(self, WorkflowScheduleMode): # String
self.add_query_param('WorkflowScheduleMode', WorkflowScheduleMode)
def get_AuditLogEnabled(self): # Boolean
return self.get_query_params().get('AuditLogEnabled')
def set_AuditLogEnabled(self, AuditLogEnabled): # Boolean
self.add_query_param('AuditLogEnabled', AuditLogEnabled)
	def get_ClusterId(self): # String
return self.get_query_params().get('ClusterId')
def set_ClusterId(self, ClusterId): # String
self.add_query_param('ClusterId', ClusterId)
def get_PriceLimit(self): # String
return self.get_query_params().get('PriceLimit')
def set_PriceLimit(self, PriceLimit): # String
self.add_query_param('PriceLimit', PriceLimit)
def get_ApiServerEipId(self): # String
return self.get_query_params().get('ApiServerEipId')
def set_ApiServerEipId(self, ApiServerEipId): # String
self.add_query_param('ApiServerEipId', ApiServerEipId) | null |
881 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkemr.endpoint import endpoint_data
class ListClustersRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Emr', '2016-04-08', 'ListClusters')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_StatusLists(self):
return self.get_query_params().get('StatusList')
def set_StatusLists(self, StatusLists):
for depth1 in range(len(StatusLists)):
if StatusLists[depth1] is not None:
self.add_query_param('StatusList.' + str(depth1 + 1) , StatusLists[depth1])
def get_IsDesc(self):
return self.get_query_params().get('IsDesc')
def set_IsDesc(self,IsDesc):
self.add_query_param('IsDesc',IsDesc)
def get_DepositType(self):
return self.get_query_params().get('DepositType')
def set_DepositType(self,DepositType):
self.add_query_param('DepositType',DepositType)
def get_PageNumber(self):
return self.get_query_params().get('PageNumber')
def set_PageNumber(self,PageNumber):
self.add_query_param('PageNumber',PageNumber)
def get_MachineType(self):
return self.get_query_params().get('MachineType')
def set_MachineType(self,MachineType):
self.add_query_param('MachineType',MachineType)
def get_ResourceGroupId(self):
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self,ResourceGroupId):
self.add_query_param('ResourceGroupId',ResourceGroupId)
def get_PageSize(self):
return self.get_query_params().get('PageSize')
	def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
def get_Tags(self):
return self.get_query_params().get('Tag')
def set_Tags(self, Tags):
for depth1 in range(len(Tags)):
if Tags[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
if Tags[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))
def get_CreateType(self):
return self.get_query_params().get('CreateType')
def set_CreateType(self,CreateType):
self.add_query_param('CreateType',CreateType)
def get_ExpiredTagLists(self):
return self.get_query_params().get('ExpiredTagList')
def set_ExpiredTagLists(self, ExpiredTagLists):
for depth1 in range(len(ExpiredTagLists)):
if ExpiredTagLists[depth1] is not None:
self.add_query_param('ExpiredTagList.' + str(depth1 + 1) , ExpiredTagLists[depth1])
def get_DefaultStatus(self):
return self.get_query_params().get('DefaultStatus')
def set_DefaultStatus(self,DefaultStatus):
self.add_query_param('DefaultStatus',DefaultStatus)
def get_VpcId(self):
return self.get_query_params().get('VpcId')
def set_VpcId(self,VpcId):
self.add_query_param('VpcId',VpcId)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
def get_ClusterTypeLists(self):
return self.get_query_params().get('ClusterTypeList')
def set_ClusterTypeLists(self, ClusterTypeLists):
for depth1 in range(len(ClusterTypeLists)):
if ClusterTypeLists[depth1] is not None:
				self.add_query_param('ClusterTypeList.' + str(depth1 + 1) , ClusterTypeLists[depth1]) | null |
882 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksmartag.endpoint import endpoint_data
class CreateSmartAccessGatewayRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Smartag', '2018-03-13', 'CreateSmartAccessGateway','smartag')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ReceiverTown(self): # String
return self.get_query_params().get('ReceiverTown')
def set_ReceiverTown(self, ReceiverTown): # String
self.add_query_param('ReceiverTown', ReceiverTown)
def get_ReceiverDistrict(self): # String
return self.get_query_params().get('ReceiverDistrict')
def set_ReceiverDistrict(self, ReceiverDistrict): # String
self.add_query_param('ReceiverDistrict', ReceiverDistrict)
def get_BuyerMessage(self): # String
return self.get_query_params().get('BuyerMessage')
def set_BuyerMessage(self, BuyerMessage): # String
self.add_query_param('BuyerMessage', BuyerMessage)
def get_ReceiverState(self): # String
return self.get_query_params().get('ReceiverState')
def set_ReceiverState(self, ReceiverState): # String
self.add_query_param('ReceiverState', ReceiverState)
def get_Period(self): # Integer
return self.get_query_params().get('Period')
def set_Period(self, Period): # Integer
self.add_query_param('Period', Period)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ReceiverPhone(self): # String
return self.get_query_params().get('ReceiverPhone')
def set_ReceiverPhone(self, ReceiverPhone): # String
self.add_query_param('ReceiverPhone', ReceiverPhone)
def get_HaType(self): # String
return self.get_query_params().get('HaType')
def set_HaType(self, HaType): # String
self.add_query_param('HaType', HaType)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_ReceiverCountry(self): # String
return self.get_query_params().get('ReceiverCountry')
def set_ReceiverCountry(self, ReceiverCountry): # String
self.add_query_param('ReceiverCountry', ReceiverCountry)
def get_MaxBandWidth(self): # Integer
return self.get_query_params().get('MaxBandWidth')
def set_MaxBandWidth(self, MaxBandWidth): # Integer
self.add_query_param('MaxBandWidth', MaxBandWidth)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_ReceiverAddress(self): # String
return self.get_query_params().get('ReceiverAddress')
def set_ReceiverAddress(self, ReceiverAddress): # String
self.add_query_param('ReceiverAddress', ReceiverAddress)
def get_HardWareSpec(self): # String
return self.get_query_params().get('HardWareSpec')
def set_HardWareSpec(self, HardWareSpec): # String
self.add_query_param('HardWareSpec', HardWareSpec)
def get_ReceiverEmail(self): # String
return self.get_query_params().get('ReceiverEmail')
def set_ReceiverEmail(self, ReceiverEmail): # String
self.add_query_param('ReceiverEmail', ReceiverEmail)
def get_ReceiverCity(self): # String
return self.get_query_params().get('ReceiverCity')
def set_ReceiverCity(self, ReceiverCity): # String
self.add_query_param('ReceiverCity', ReceiverCity)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
    def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_CPEVersion(self): # String
return self.get_query_params().get('CPEVersion')
def set_CPEVersion(self, CPEVersion): # String
self.add_query_param('CPEVersion', CPEVersion)
def get_ReceiverMobile(self): # String
return self.get_query_params().get('ReceiverMobile')
def set_ReceiverMobile(self, ReceiverMobile): # String
self.add_query_param('ReceiverMobile', ReceiverMobile)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_ReceiverName(self): # String
return self.get_query_params().get('ReceiverName')
def set_ReceiverName(self, ReceiverName): # String
self.add_query_param('ReceiverName', ReceiverName)
def get_AlreadyHaveSag(self): # Boolean
return self.get_query_params().get('AlreadyHaveSag')
def set_AlreadyHaveSag(self, AlreadyHaveSag): # Boolean
self.add_query_param('AlreadyHaveSag', AlreadyHaveSag)
def get_ChargeType(self): # String
return self.get_query_params().get('ChargeType')
def set_ChargeType(self, ChargeType): # String
self.add_query_param('ChargeType', ChargeType)
def get_ReceiverZip(self): # String
return self.get_query_params().get('ReceiverZip')
def set_ReceiverZip(self, ReceiverZip): # String
self.add_query_param('ReceiverZip', ReceiverZip) | null |
883 | from rest_framework import status as http_status
from flask import redirect, request
import markupsafe
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError, PermissionsError
from framework import status
from transitions import MachineError
from osf.exceptions import UnsupportedSanctionHandlerKind, TokenError
def registration_approval_handler(action, registration, registered_from):
# TODO: Unnecessary and duplicated dictionary.
status.push_status_message({
'approve': 'Your registration approval has been accepted.',
'reject': 'Your disapproval has been accepted and the registration has been cancelled.',
}[action], kind='success', trust=False)
# Allow decorated view function to return response
return None
def embargo_handler(action, registration, registered_from):
status.push_status_message({
'approve': 'Your embargo approval has been accepted.',
'reject': 'Your disapproval has been accepted and the embargo has been cancelled.',
}[action], kind='success', trust=False)
# Allow decorated view function to return response
return None
def embargo_termination_handler(action, registration, registered_from):
status.push_status_message({
'approve': 'Your approval to make this embargo public has been accepted.',
'reject': 'Your disapproval has been accepted and this embargo will not be made public.',
}[action], kind='success', trust=False)
# Allow decorated view function to return response
return None
def retraction_handler(action, registration, registered_from):
status.push_status_message({
'approve': 'Your withdrawal approval has been accepted.',
'reject': 'Your disapproval has been accepted and the withdrawal has been cancelled.'
}[action], kind='success', trust=False)
# Allow decorated view function to return response
return None
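# Each handler above pushes a status message and returns None so the decorated
# view falls through to its normal response; sanction_handler below dispatches
# to the right handler by sanction kind.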
@must_be_logged_in
def sanction_handler(kind, action, payload, encoded_token, auth, **kwargs):
from osf.models import (
Embargo,
EmbargoTerminationApproval,
RegistrationApproval,
Retraction
)
Model = {
'registration': RegistrationApproval,
'embargo': Embargo,
'embargo_termination_approval': EmbargoTerminationApproval,
'retraction': Retraction
}.get(kind, None)
if not Model:
raise UnsupportedSanctionHandlerKind
sanction_id = payload.get('sanction_id', None)
sanction = Model.load(sanction_id)
err_code = None
err_message = None
if not sanction:
err_code = http_status.HTTP_400_BAD_REQUEST
err_message = 'There is no {0} associated with this token.'.format(
markupsafe.escape(Model.DISPLAY_NAME))
elif sanction.is_approved:
# Simply strip query params and redirect if already approved
return redirect(request.base_url)
elif sanction.is_rejected:
err_code = http_status.HTTP_410_GONE if kind in ['registration', 'embargo'] else http_status.HTTP_400_BAD_REQUEST
err_message = 'This registration {0} has been rejected.'.format(
markupsafe.escape(sanction.DISPLAY_NAME))
if err_code:
raise HTTPError(err_code, data=dict(
message_long=err_message
))
do_action = getattr(sanction, action, None)
if do_action:
registration = sanction.registrations.get()
registered_from = registration.registered_from
try:
do_action(user=auth.user, token=encoded_token)
except TokenError as e:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': e.message_short,
'message_long': str(e)
})
except PermissionsError as e:
raise HTTPError(http_status.HTTP_401_UNAUTHORIZED, data={
'message_short': 'Unauthorized access',
'message_long': str(e)
})
except MachineError as e:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Operation not allowed at this time',
'message_long': e.value
})
sanction.save()
return {
'registration': registration_approval_handler,
'embargo': embargo_handler,
        'embargo_termination_approval': embargo_termination_handler,
'retraction': retraction_handler,
}[kind](action, registration, registered_from) | null |
884 | # Drakkar-Software OctoBot-Tentacles
# Copyright (c) Drakkar-Software, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import threading
# comment imports to remove twitter from dependencies when tentacle is disabled
# import twitter
import octobot_services.channel as services_channel
import octobot_services.constants as services_constants
import octobot_services.service_feeds as service_feeds
import tentacles.Services.Services_bases as Services_bases
# Inheritance is disabled to hide the tentacle: as of Feb 9, 2023 the Twitter API is paid only.
# class TwitterServiceFeedChannel(services_channel.AbstractServiceFeedChannel):
class TwitterServiceFeedChannel:
pass
# Inheritance is disabled to hide the tentacle: as of Feb 9, 2023 the Twitter API is paid only.
# class TwitterServiceFeed(service_feeds.AbstractServiceFeed, threading.Thread):
class TwitterServiceFeed:
FEED_CHANNEL = TwitterServiceFeedChannel
REQUIRED_SERVICES = [Services_bases.TwitterService]
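    # NOTE: with inheritance disabled above, the methods below are kept for
    # reference only; super().__init__ and the twitter.* exception handling
    # assume the original base classes and the commented-out `import twitter`.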
def __init__(self, config, main_async_loop, bot_id):
super().__init__(config, main_async_loop, bot_id)
threading.Thread.__init__(self, name=self.get_name())
self.user_ids = []
self.hashtags = []
self.counter = 0
    async def start(self) -> bool:
threading.Thread.start(self)
return True
# merge new config into existing config
def update_feed_config(self, config):
if services_constants.CONFIG_TWITTERS_ACCOUNTS in self.feed_config:
self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS] = {
**self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS],
**config[services_constants.CONFIG_TWITTERS_ACCOUNTS]}
else:
self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS] = config[
services_constants.CONFIG_TWITTERS_ACCOUNTS]
if services_constants.CONFIG_TWITTERS_HASHTAGS in self.feed_config:
self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS] = {
**self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS],
**config[services_constants.CONFIG_TWITTERS_HASHTAGS]}
else:
self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS] = config[
services_constants.CONFIG_TWITTERS_HASHTAGS]
def _init_users_accounts(self):
tempo_added_accounts = []
for symbol in self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS]:
for account in self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS][symbol]:
if account not in tempo_added_accounts:
tempo_added_accounts.append(account)
try:
self.user_ids.append(str(self.services[0].get_user_id(account)))
except twitter.TwitterError as e:
self.logger.error(account + " : " + str(e))
def _init_hashtags(self):
for symbol in self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS]:
for hashtag in self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS][symbol]:
if hashtag not in self.hashtags:
self.hashtags.append(hashtag)
def _initialize(self):
if not self.user_ids:
self._init_users_accounts()
if not self.hashtags:
self._init_hashtags()
def _something_to_watch(self):
return (services_constants.CONFIG_TWITTERS_HASHTAGS in self.feed_config and self.feed_config[
services_constants.CONFIG_TWITTERS_HASHTAGS]) \
or (services_constants.CONFIG_TWITTERS_ACCOUNTS in self.feed_config and self.feed_config[
services_constants.CONFIG_TWITTERS_ACCOUNTS])
async def _start_listener(self):
for tweet in self.services[0].get_endpoint().GetStreamFilter(follow=self.user_ids,
track=self.hashtags,
stall_warnings=True):
self.counter += 1
string_tweet = self.services[0].get_tweet_text(tweet)
if string_tweet:
tweet_desc = str(tweet).lower()
self._notify_consumers(
{
services_constants.FEED_METADATA: tweet_desc,
services_constants.CONFIG_TWEET: tweet,
services_constants.CONFIG_TWEET_DESCRIPTION: string_tweet.lower()
}
)
async def _start_service_feed(self):
while not self.should_stop:
try:
await self._start_listener()
except twitter.error.TwitterError as e:
self.logger.exception(e, True, f"Error when receiving Twitter feed: {e.message} ({e})")
self.should_stop = True
except Exception as e:
self.logger.exception(e, True, f"Error when receiving Twitter feed: ({e})")
self.should_stop = True
return False | null |
885 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class AddTagsToResourceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'AddTagsToResource')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Tag4value(self): # String
return self.get_query_params().get('Tag.4.value')
def set_Tag4value(self, Tag4value): # String
self.add_query_param('Tag.4.value', Tag4value)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_Tag2key(self): # String
return self.get_query_params().get('Tag.2.key')
def set_Tag2key(self, Tag2key): # String
self.add_query_param('Tag.2.key', Tag2key)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Tag3key(self): # String
return self.get_query_params().get('Tag.3.key')
def set_Tag3key(self, Tag3key): # String
self.add_query_param('Tag.3.key', Tag3key)
def get_Tag1value(self): # String
return self.get_query_params().get('Tag.1.value')
def set_Tag1value(self, Tag1value): # String
self.add_query_param('Tag.1.value', Tag1value)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_Tag3value(self): # String
return self.get_query_params().get('Tag.3.value')
def set_Tag3value(self, Tag3value): # String
self.add_query_param('Tag.3.value', Tag3value)
def get_proxyId(self): # String
return self.get_query_params().get('proxyId')
def set_proxyId(self, proxyId): # String
self.add_query_param('proxyId', proxyId)
def get_Tag5key(self): # String
return self.get_query_params().get('Tag.5.key')
def set_Tag5key(self, Tag5key): # String
self.add_query_param('Tag.5.key', Tag5key)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_Tag5value(self): # String
return self.get_query_params().get('Tag.5.value')
def set_Tag5value(self, Tag5value): # String
self.add_query_param('Tag.5.value', Tag5value)
def get_Tags(self): # String
return self.get_query_params().get('Tags')
def set_Tags(self, Tags): # String
self.add_query_param('Tags', Tags)
def get_Tag1key(self): # String
return self.get_query_params().get('Tag.1.key')
def set_Tag1key(self, Tag1key): # String
self.add_query_param('Tag.1.key', Tag1key)
def get_Tag2value(self): # String
return self.get_query_params().get('Tag.2.value')
def set_Tag2value(self, Tag2value): # String
self.add_query_param('Tag.2.value', Tag2value)
def get_Tag4key(self): # String
return self.get_query_params().get('Tag.4.key')
    def set_Tag4key(self, Tag4key): # String
self.add_query_param('Tag.4.key', Tag4key) | null |
886 | #
# subunit: extensions to python unittest to get test results from subprocesses.
# Copyright (C) 2005 Robert Collins <[email protected]>
# Copyright (C) 2011 Martin Pool <[email protected]>
#
# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
# license at the users choice. A copy of both licenses are available in the
# project source as Apache-2.0 and BSD. You may not use this file except in
# compliance with one of these two licences.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# license you chose for the specific language governing permissions and
# limitations under that license.
#
import unittest
from testtools.compat import _b, BytesIO
import subunit.chunked
def test_suite():
    import subunit.tests.TestUtil  # deferred to avoid a circular import at module load
    loader = subunit.tests.TestUtil.TestLoader()
result = loader.loadTestsFromName(__name__)
return result
class TestDecode(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.output = BytesIO()
self.decoder = subunit.chunked.Decoder(self.output)
def test_close_read_length_short_errors(self):
self.assertRaises(ValueError, self.decoder.close)
def test_close_body_short_errors(self):
self.assertEqual(None, self.decoder.write(_b('2\r\na')))
self.assertRaises(ValueError, self.decoder.close)
def test_close_body_buffered_data_errors(self):
self.assertEqual(None, self.decoder.write(_b('2\r')))
self.assertRaises(ValueError, self.decoder.close)
def test_close_after_finished_stream_safe(self):
self.assertEqual(None, self.decoder.write(_b('2\r\nab')))
self.assertEqual(_b(''), self.decoder.write(_b('0\r\n')))
self.decoder.close()
def test_decode_nothing(self):
self.assertEqual(_b(''), self.decoder.write(_b('0\r\n')))
self.assertEqual(_b(''), self.output.getvalue())
def test_decode_serialised_form(self):
self.assertEqual(None, self.decoder.write(_b("F\r\n")))
self.assertEqual(None, self.decoder.write(_b("serialised\n")))
self.assertEqual(_b(''), self.decoder.write(_b("form0\r\n")))
def test_decode_short(self):
self.assertEqual(_b(''), self.decoder.write(_b('3\r\nabc0\r\n')))
self.assertEqual(_b('abc'), self.output.getvalue())
def test_decode_combines_short(self):
self.assertEqual(_b(''), self.decoder.write(_b('6\r\nabcdef0\r\n')))
self.assertEqual(_b('abcdef'), self.output.getvalue())
    def test_decode_excess_bytes_from_write(self):
self.assertEqual(_b('1234'), self.decoder.write(_b('3\r\nabc0\r\n1234')))
self.assertEqual(_b('abc'), self.output.getvalue())
def test_decode_write_after_finished_errors(self):
self.assertEqual(_b('1234'), self.decoder.write(_b('3\r\nabc0\r\n1234')))
self.assertRaises(ValueError, self.decoder.write, _b(''))
def test_decode_hex(self):
self.assertEqual(_b(''), self.decoder.write(_b('A\r\n12345678900\r\n')))
self.assertEqual(_b('1234567890'), self.output.getvalue())
def test_decode_long_ranges(self):
self.assertEqual(None, self.decoder.write(_b('10000\r\n')))
self.assertEqual(None, self.decoder.write(_b('1' * 65536)))
self.assertEqual(None, self.decoder.write(_b('10000\r\n')))
self.assertEqual(None, self.decoder.write(_b('2' * 65536)))
self.assertEqual(_b(''), self.decoder.write(_b('0\r\n')))
self.assertEqual(_b('1' * 65536 + '2' * 65536), self.output.getvalue())
def test_decode_newline_nonstrict(self):
"""Tolerate chunk markers with no CR character."""
# From <http://pad.lv/505078>
self.decoder = subunit.chunked.Decoder(self.output, strict=False)
self.assertEqual(None, self.decoder.write(_b('a\n')))
self.assertEqual(None, self.decoder.write(_b('abcdeabcde')))
self.assertEqual(_b(''), self.decoder.write(_b('0\n')))
self.assertEqual(_b('abcdeabcde'), self.output.getvalue())
def test_decode_strict_newline_only(self):
"""Reject chunk markers with no CR character in strict mode."""
# From <http://pad.lv/505078>
self.assertRaises(ValueError,
self.decoder.write, _b('a\n'))
def test_decode_strict_multiple_crs(self):
self.assertRaises(ValueError,
self.decoder.write, _b('a\r\r\n'))
def test_decode_short_header(self):
self.assertRaises(ValueError,
self.decoder.write, _b('\n'))
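# Wire format recap for the encoder tests below: each chunk is serialised as
# "<hex length>\r\n<bytes>" and the stream ends with a zero-length chunk, so
# writing b"abc" and closing yields b"3\r\nabc0\r\n".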
class TestEncode(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.output = BytesIO()
self.encoder = subunit.chunked.Encoder(self.output)
def test_encode_nothing(self):
self.encoder.close()
self.assertEqual(_b('0\r\n'), self.output.getvalue())
def test_encode_empty(self):
self.encoder.write(_b(''))
self.encoder.close()
self.assertEqual(_b('0\r\n'), self.output.getvalue())
def test_encode_short(self):
self.encoder.write(_b('abc'))
self.encoder.close()
self.assertEqual(_b('3\r\nabc0\r\n'), self.output.getvalue())
def test_encode_combines_short(self):
self.encoder.write(_b('abc'))
self.encoder.write(_b('def'))
self.encoder.close()
self.assertEqual(_b('6\r\nabcdef0\r\n'), self.output.getvalue())
def test_encode_over_9_is_in_hex(self):
self.encoder.write(_b('1234567890'))
self.encoder.close()
self.assertEqual(_b('A\r\n12345678900\r\n'), self.output.getvalue())
def test_encode_long_ranges_not_combined(self):
self.encoder.write(_b('1' * 65536))
self.encoder.write(_b('2' * 65536))
self.encoder.close()
self.assertEqual(_b('10000\r\n' + '1' * 65536 + '10000\r\n' +
'2' * 65536 + '0\r\n'), self.output.getvalue()) | null |
887 | #/*##########################################################################
# Copyright (C) 2004-2014 V.A. Sole, European Synchrotron Radiation Facility
#
# This file is part of the PyMca X-ray Fluorescence Toolkit developed at
# the ESRF by the Software group.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#############################################################################*/
"""This plugin allows to configure and execute a batch fitting for all
spectra in the stack.
The user can select the fit function and a background function from a
selection of functions, and must provide the initial estimation for
the iterative fit.
The fit result is saved to file, at the end. A 2D map is created for each
fitted parameter, and saved in EDF and ASCII formats."""
__author__ = "V.A. Sole - ESRF Data Analysis"
__contact__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
import logging
_logger = logging.getLogger(__name__)
try:
from PyMca5 import StackPluginBase
from PyMca5.PyMcaGui import StackSimpleFitWindow
from PyMca5.PyMcaGui import PyMca_Icons
except ImportError:
_logger.warning("FitStackPlugin importing from somewhere else")
class FitStackPlugin(StackPluginBase.StackPluginBase):
def __init__(self, stackWindow, **kw):
if _logger.getEffectiveLevel() == logging.DEBUG:
StackPluginBase.pluginBaseLogger.setLevel(logging.DEBUG)
StackPluginBase.StackPluginBase.__init__(self, stackWindow, **kw)
self.methodDict = {}
function = self.fitStack
info = "Fit stack with user defined functions"
icon = PyMca_Icons.fit
self.methodDict["Fit Stack"] =[function,
info,
icon]
self.__methodKeys = ["Fit Stack"]
self.simpleFitWindow = None
def stackUpdated(self):
if self.simpleFitWindow is None:
return
self.__updateOwnData()
def selectionMaskUpdated(self):
if self.simpleFitWindow is None:
return
self.simpleFitWindow.setMask(self.getStackSelectionMask())
def stackClosed(self):
if self.simpleFitWindow is not None:
self.simpleFitWindow.close()
#Methods implemented by the plugin
def getMethods(self):
return self.__methodKeys
def getMethodToolTip(self, name):
return self.methodDict[name][1]
def getMethodPixmap(self, name):
return self.methodDict[name][2]
def applyMethod(self, name):
return self.methodDict[name][0]()
def __updateOwnData(self):
activeCurve = self.getActiveCurve()
if activeCurve is None:
return
#this can be problematic if a fit is going on...
x, spectrum, legend, info = activeCurve
xlabel = info['xlabel']
ylabel = info['ylabel']
xmin, xmax = self.getGraphXLimits()
ymin, ymax = self.getGraphYLimits()
mcaIndex = self.getStackInfo()['McaIndex']
self.simpleFitWindow.setSpectrum(x,
spectrum,
xmin=xmin,
xmax=xmax)
self.simpleFitWindow.setData(x,
self.getStackData(),
data_index=mcaIndex,
mask=self.getStackSelectionMask())
def fitStack(self):
if self.simpleFitWindow is None:
self.simpleFitWindow = StackSimpleFitWindow.StackSimpleFitWindow()
self.__updateOwnData()
self.simpleFitWindow.show()
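# PyMca discovers stack plugins through the module-level MENU_TEXT constant and
# the getStackPluginInstance factory defined below.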
MENU_TEXT = "Stack Simple Fitting"
def getStackPluginInstance(stackWindow, **kw):
ob = FitStackPlugin(stackWindow)
return ob | null |
888 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
import json
class CreateAppInstanceGroupRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'appstream-center', '2021-09-01', 'CreateAppInstanceGroup')
self.set_method('POST')
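    # Struct-typed parameters below are JSON-encoded into the request body via
    # json.dumps, while RepeatList parameters are flattened as 'Users.1', 'Users.2', ...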
def get_RuntimePolicy(self): # Struct
return self.get_body_params().get('RuntimePolicy')
def set_RuntimePolicy(self, RuntimePolicy): # Struct
self.add_body_params("RuntimePolicy", json.dumps(RuntimePolicy))
def get_BizRegionId(self): # String
return self.get_body_params().get('BizRegionId')
def set_BizRegionId(self, BizRegionId): # String
self.add_body_params('BizRegionId', BizRegionId)
def get_ProductType(self): # String
return self.get_body_params().get('ProductType')
def set_ProductType(self, ProductType): # String
self.add_body_params('ProductType', ProductType)
def get_Network(self): # Struct
return self.get_body_params().get('Network')
def set_Network(self, Network): # Struct
self.add_body_params("Network", json.dumps(Network))
def get_SessionTimeout(self): # Integer
return self.get_body_params().get('SessionTimeout')
def set_SessionTimeout(self, SessionTimeout): # Integer
self.add_body_params('SessionTimeout', SessionTimeout)
def get_ChargeResourceMode(self): # String
return self.get_body_params().get('ChargeResourceMode')
def set_ChargeResourceMode(self, ChargeResourceMode): # String
self.add_body_params('ChargeResourceMode', ChargeResourceMode)
def get_AppCenterImageId(self): # String
return self.get_body_params().get('AppCenterImageId')
def set_AppCenterImageId(self, AppCenterImageId): # String
self.add_body_params('AppCenterImageId', AppCenterImageId)
    def get_UserInfo(self): # Struct
return self.get_body_params().get('UserInfo')
def set_UserInfo(self, UserInfo): # Struct
self.add_body_params("UserInfo", json.dumps(UserInfo))
def get_PreOpenAppId(self): # String
return self.get_body_params().get('PreOpenAppId')
def set_PreOpenAppId(self, PreOpenAppId): # String
self.add_body_params('PreOpenAppId', PreOpenAppId)
def get_Period(self): # Integer
return self.get_body_params().get('Period')
def set_Period(self, Period): # Integer
self.add_body_params('Period', Period)
def get_AutoPay(self): # Boolean
return self.get_body_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_body_params('AutoPay', AutoPay)
def get_NodePool(self): # Struct
return self.get_body_params().get('NodePool')
def set_NodePool(self, NodePool): # Struct
self.add_body_params("NodePool", json.dumps(NodePool))
def get_PromotionId(self): # String
return self.get_body_params().get('PromotionId')
def set_PromotionId(self, PromotionId): # String
self.add_body_params('PromotionId', PromotionId)
def get_Userss(self): # RepeatList
return self.get_body_params().get('Users')
def set_Userss(self, Users): # RepeatList
for depth1 in range(len(Users)):
self.add_body_params('Users.' + str(depth1 + 1), Users[depth1])
def get_AppInstanceGroupName(self): # String
return self.get_body_params().get('AppInstanceGroupName')
def set_AppInstanceGroupName(self, AppInstanceGroupName): # String
self.add_body_params('AppInstanceGroupName', AppInstanceGroupName)
def get_PeriodUnit(self): # String
return self.get_body_params().get('PeriodUnit')
def set_PeriodUnit(self, PeriodUnit): # String
self.add_body_params('PeriodUnit', PeriodUnit)
def get_AutoRenew(self): # Boolean
return self.get_body_params().get('AutoRenew')
def set_AutoRenew(self, AutoRenew): # Boolean
self.add_body_params('AutoRenew', AutoRenew)
def get_ChargeType(self): # String
return self.get_body_params().get('ChargeType')
def set_ChargeType(self, ChargeType): # String
self.add_body_params('ChargeType', ChargeType) | null |
889 | # Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
"""
Handles all caching logic including:
- Retrieving from cache
- Saving to cache
- Determining whether or not certain items have expired and need to be refreshed
"""
from __future__ import annotations
import json
import logging
import os
from datetime import datetime, timezone
from functools import wraps
from pathlib import Path
from typing import Sequence
from .._vendor.appdirs import user_cache_dir
from ..base.constants import APP_NAME, NOTICES_CACHE_FN, NOTICES_CACHE_SUBDIR
from ..utils import ensure_dir_exists
from .types import ChannelNotice, ChannelNoticeResponse
logger = logging.getLogger(__name__)
def cached_response(func):
@wraps(func)
def wrapper(url: str, name: str):
cache_dir = get_notices_cache_dir()
cache_val = get_notice_response_from_cache(url, name, cache_dir)
if cache_val:
return cache_val
return_value = func(url, name)
if return_value is not None:
write_notice_response_to_cache(return_value, cache_dir)
return return_value
return wrapper
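# A minimal sketch of how the decorator above is applied; the function name and
# body are illustrative only and not part of conda.
@cached_response
def _example_fetch_channel_notices(url: str, name: str) -> ChannelNoticeResponse | None:
    # A real implementation would issue an HTTP GET against `url` and build a
    # ChannelNoticeResponse from the JSON payload; returning None here means the
    # wrapper simply skips the cache write.
    return None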
def is_notice_response_cache_expired(
channel_notice_response: ChannelNoticeResponse,
) -> bool:
"""
This checks the contents of the cache response to see if it is expired.
If for whatever reason we encounter an exception while parsing the individual
messages, we assume an invalid cache and return true.
"""
now = datetime.now(timezone.utc)
def is_channel_notice_expired(expired_at: datetime | None) -> bool:
"""If there is no "expired_at" field present assume it is expired."""
if expired_at is None:
return True
return expired_at < now
return any(
is_channel_notice_expired(chn.expired_at)
for chn in channel_notice_response.notices
)
@ensure_dir_exists
def get_notices_cache_dir() -> Path:
"""Returns the location of the notices cache directory as a Path object"""
cache_dir = user_cache_dir(APP_NAME, appauthor=APP_NAME)
return Path(cache_dir).joinpath(NOTICES_CACHE_SUBDIR)
def get_notices_cache_file() -> Path:
"""Returns the location of the notices cache file as a Path object"""
cache_dir = get_notices_cache_dir()
cache_file = cache_dir.joinpath(NOTICES_CACHE_FN)
if not cache_file.is_file():
with open(cache_file, "w") as fp:
fp.write("")
return cache_file
def get_notice_response_from_cache(
url: str, name: str, cache_dir: Path
) -> ChannelNoticeResponse | None:
"""Retrieves a notice response object from cache if it exists."""
cache_key = ChannelNoticeResponse.get_cache_key(url, cache_dir)
if os.path.isfile(cache_key):
with open(cache_key) as fp:
data = json.load(fp)
chn_ntc_resp = ChannelNoticeResponse(url, name, data)
if not is_notice_response_cache_expired(chn_ntc_resp):
return chn_ntc_resp
def write_notice_response_to_cache(
channel_notice_response: ChannelNoticeResponse, cache_dir: Path
) -> None:
"""Writes our notice data to our local cache location."""
cache_key = ChannelNoticeResponse.get_cache_key(
channel_notice_response.url, cache_dir
)
with open(cache_key, "w") as fp:
json.dump(channel_notice_response.json_data, fp)
def mark_channel_notices_as_viewed(
cache_file: Path, channel_notices: Sequence[ChannelNotice]
) -> None:
"""Insert channel notice into our database marking it as read."""
notice_ids = {chn.id for chn in channel_notices}
with open(cache_file) as fp:
contents: str = fp.read()
contents_unique = set(filter(None, set(contents.splitlines())))
contents_new = contents_unique.union(notice_ids)
# Save new version of cache file
with open(cache_file, "w") as fp:
fp.write("\n".join(contents_new))
def get_viewed_channel_notice_ids(
cache_file: Path, channel_notices: Sequence[ChannelNotice]
) -> set[str]:
"""Return the ids of the channel notices which have already been seen."""
notice_ids = {chn.id for chn in channel_notices}
with open(cache_file) as fp:
contents: str = fp.read()
contents_unique = set(filter(None, set(contents.splitlines())))
return notice_ids.intersection(contents_unique) | null |
890 | import asyncio
import unittest
from typing import Awaitable, Optional
from unittest.mock import AsyncMock, MagicMock, patch
from hummingbot.client.config.client_config_map import ClientConfigMap
from hummingbot.client.config.config_helpers import ClientConfigAdapter
from hummingbot.connector.derivative.dydx_perpetual.dydx_perpetual_derivative import DydxPerpetualDerivative
from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant
class DydxPerpetualUserStreamDataSourceUnitTests(unittest.TestCase):
# logging.Level required to receive logs from the data source logger
level = 0
@classmethod
    def setUpClass(cls) -> None:
        super().setUpClass()
cls.ev_loop = asyncio.get_event_loop()
cls.base_asset = "COINALPHA"
cls.quote_asset = "HBOT"
cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}"
cls.ex_trading_pair = f"{cls.base_asset}-{cls.quote_asset}"
def setUp(self) -> None:
super().setUp()
self.log_records = []
self.async_task: Optional[asyncio.Task] = None
client_config_map = ClientConfigAdapter(ClientConfigMap())
self.connector = DydxPerpetualDerivative(
client_config_map,
dydx_perpetual_api_key="someApiKey",
dydx_perpetual_api_secret="AA1p9oklqBkDT8xw2FRWwlZCfUf98wEG",
dydx_perpetual_passphrase="somePassphrase",
dydx_perpetual_ethereum_address="someEthAddress",
dydx_perpetual_stark_private_key="0123456789",
trading_pairs=[self.trading_pair],
trading_required=False,
)
self.data_source = self.connector._create_user_stream_data_source()
self.data_source.logger().setLevel(1)
self.data_source.logger().addHandler(self)
self.mocking_assistant = NetworkMockingAssistant()
self.resume_test_event = asyncio.Event()
def tearDown(self) -> None:
self.async_task and self.async_task.cancel()
super().tearDown()
def handle(self, record):
self.log_records.append(record)
def _is_logged(self, log_level: str, message: str) -> bool:
return any(record.levelname == log_level and record.getMessage() == message for record in self.log_records)
def _create_exception_and_unlock_test_with_event(self, exception):
self.resume_test_event.set()
raise exception
def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1):
ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout))
return ret
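    # The tests below drive data source coroutines on a real event loop with a
    # short timeout, so a stream that hangs fails fast instead of blocking CI.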
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
@patch(
"hummingbot.connector.derivative.dydx_perpetual.dydx_perpetual_user_stream_data_source."
"DydxPerpetualUserStreamDataSource._sleep"
)
def test_listen_for_user_stream_raises_cancelled_exception(self, _, ws_connect_mock):
ws_connect_mock.side_effect = asyncio.CancelledError
with self.assertRaises(asyncio.CancelledError):
self.async_run_with_timeout(self.data_source.listen_for_user_stream(asyncio.Queue()))
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
@patch("hummingbot.connector.derivative.dydx_perpetual.dydx_perpetual_auth.DydxPerpetualAuth._get_iso_timestamp")
@patch(
"hummingbot.connector.derivative.dydx_perpetual.dydx_perpetual_user_stream_data_source."
"DydxPerpetualUserStreamDataSource._sleep"
)
def test_listen_for_user_stream_raises_logs_exception(self, mock_sleep, ts_mock, ws_connect_mock):
mock_sleep.side_effect = lambda: (self.ev_loop.run_until_complete(asyncio.sleep(0.5)))
ts_mock.return_value = "2022-07-06T12:20:53.000Z"
ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
ws_connect_mock.return_value.receive.side_effect = lambda *_: self._create_exception_and_unlock_test_with_event(
Exception("TEST ERROR")
)
self.async_task = self.ev_loop.create_task(self.data_source.listen_for_user_stream(asyncio.Queue()))
self.async_run_with_timeout(self.resume_test_event.wait(), 1.0)
self.assertTrue(
self._is_logged("ERROR", "Unexpected error while listening to user stream. Retrying after 5 seconds...")
)
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
@patch("hummingbot.connector.derivative.dydx_perpetual.dydx_perpetual_auth.DydxPerpetualAuth._get_iso_timestamp")
def test_ws_authentication_successful(self, ts_mock: MagicMock, ws_connect_mock):
ts_mock.return_value = "2022-07-06T12:20:53.000Z"
ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
self.async_run_with_timeout(self.data_source._connected_websocket_assistant())
json_msgs = self.mocking_assistant.json_messages_sent_through_websocket(ws_connect_mock.return_value)
self.assertEqual("someApiKey", json_msgs[0]["apiKey"])
self.assertEqual("somePassphrase", json_msgs[0]["passphrase"])
self.assertEqual(ts_mock.return_value, json_msgs[0]["timestamp"])
self.assertEqual("MLJvgJDWv-o1lz1e6oRuU96SbCay1Qo9m-E6kKleOxY=", json_msgs[0]["signature"]) | null |
891 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkoceanbasepro.endpoint import endpoint_data
class DescribeOasSlowSQLListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'OceanBasePro', '2019-09-01', 'DescribeOasSlowSQLList','oceanbase')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_StartTime(self): # String
return self.get_body_params().get('StartTime')
def set_StartTime(self, StartTime): # String
self.add_body_params('StartTime', StartTime)
def get_SearchRule(self): # String
return self.get_body_params().get('SearchRule')
def set_SearchRule(self, SearchRule): # String
self.add_body_params('SearchRule', SearchRule)
def get_MergeDynamicSql(self): # Boolean
return self.get_body_params().get('MergeDynamicSql')
def set_MergeDynamicSql(self, MergeDynamicSql): # Boolean
self.add_body_params('MergeDynamicSql', MergeDynamicSql)
def get_DynamicSql(self): # Boolean
return self.get_body_params().get('DynamicSql')
def set_DynamicSql(self, DynamicSql): # Boolean
self.add_body_params('DynamicSql', DynamicSql)
def get_SqlTextLength(self): # Long
return self.get_body_params().get('SqlTextLength')
def set_SqlTextLength(self, SqlTextLength): # Long
self.add_body_params('SqlTextLength', SqlTextLength)
def get_TenantId(self): # String
return self.get_body_params().get('TenantId')
def set_TenantId(self, TenantId): # String
self.add_body_params('TenantId', TenantId)
def get_SearchValue(self): # String
return self.get_body_params().get('SearchValue')
def set_SearchValue(self, SearchValue): # String
self.add_body_params('SearchValue', SearchValue)
def get_SqlId(self): # String
return self.get_body_params().get('SqlId')
def set_SqlId(self, SqlId): # String
self.add_body_params('SqlId', SqlId)
def get_FilterCondition(self): # String
return self.get_body_params().get('FilterCondition')
def set_FilterCondition(self, FilterCondition): # String
self.add_body_params('FilterCondition', FilterCondition)
def get_SearchParam(self): # String
return self.get_body_params().get('SearchParam')
def set_SearchParam(self, SearchParam): # String
self.add_body_params('SearchParam', SearchParam)
def get_EndTime(self): # String
return self.get_body_params().get('EndTime')
def set_EndTime(self, EndTime): # String
self.add_body_params('EndTime', EndTime)
def get_NodeIp(self): # String
return self.get_body_params().get('NodeIp')
def set_NodeIp(self, NodeIp): # String
self.add_body_params('NodeIp', NodeIp)
def get_InstanceId(self): # String
return self.get_body_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_body_params('InstanceId', InstanceId)
    def get_DbName(self): # String
return self.get_body_params().get('DbName')
def set_DbName(self, DbName): # String
self.add_body_params('DbName', DbName)
def get_SearchKeyWord(self): # String
return self.get_body_params().get('SearchKeyWord')
def set_SearchKeyWord(self, SearchKeyWord): # String
self.add_body_params('SearchKeyWord', SearchKeyWord)
def get_AcceptLanguage(self): # String
return self.get_body_params().get('AcceptLanguage')
def set_AcceptLanguage(self, AcceptLanguage): # String
self.add_body_params('AcceptLanguage', AcceptLanguage) | null |
892 | import asyncio
import unittest
from unittest.mock import MagicMock, patch
from prompt_toolkit.widgets import Button
from hummingbot.client.config.client_config_map import ClientConfigMap
from hummingbot.client.config.config_helpers import ClientConfigAdapter, read_system_configs_from_yml
from hummingbot.client.hummingbot_application import HummingbotApplication
from hummingbot.client.tab.data_types import CommandTab
from hummingbot.client.ui.custom_widgets import CustomTextArea
from hummingbot.client.ui.hummingbot_cli import HummingbotCLI
from hummingbot.core.event.event_listener import EventListener
from hummingbot.core.event.events import HummingbotUIEvent
class HummingbotCLITest(unittest.TestCase):
command_name = "command_1"
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
cls.ev_loop = asyncio.get_event_loop()
cls.ev_loop.run_until_complete(read_system_configs_from_yml())
def setUp(self) -> None:
super().setUp()
self.client_config_map = ClientConfigAdapter(ClientConfigMap())
tabs = {self.command_name: CommandTab(self.command_name, None, None, None, MagicMock())}
self.mock_hb = MagicMock()
self.app = HummingbotCLI(
client_config_map=self.client_config_map,
input_handler=None,
bindings=None,
completer=None,
command_tabs=tabs)
self.app.app = MagicMock()
self.hb = HummingbotApplication()
    def test_handle_tab_command_closes_existing_tab(self):
tab = self.app.command_tabs[self.command_name]
tab.close_button = MagicMock()
tab.button = MagicMock()
tab.output_field = MagicMock()
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": True})
self.assertIsNone(tab.button)
self.assertIsNone(tab.close_button)
self.assertIsNone(tab.output_field)
self.assertFalse(tab.is_selected)
self.assertEqual(tab.tab_index, 0)
def test_handle_tab_command_create_new_tab_and_display(self):
tab = self.app.command_tabs[self.command_name]
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": False})
self.assertIsInstance(tab.button, Button)
self.assertIsInstance(tab.close_button, Button)
self.assertIsInstance(tab.output_field, CustomTextArea)
self.assertEqual(tab.tab_index, 1)
self.assertTrue(tab.is_selected)
self.assertTrue(tab.tab_class.display.called)
@patch("hummingbot.client.ui.layout.Layout")
@patch("hummingbot.client.ui.layout.FloatContainer")
@patch("hummingbot.client.ui.layout.ConditionalContainer")
@patch("hummingbot.client.ui.layout.Box")
@patch("hummingbot.client.ui.layout.HSplit")
@patch("hummingbot.client.ui.layout.VSplit")
def test_handle_tab_command_on_existing_tab(self, mock_vsplit, mock_hsplit, mock_box, moc_cc, moc_fc, mock_layout):
tab = self.app.command_tabs[self.command_name]
tab.button = MagicMock()
tab.output_field = MagicMock()
tab.close_button = MagicMock()
tab.is_selected = False
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": False})
self.assertTrue(tab.is_selected)
self.assertTrue(tab.tab_class.display.call_count == 1)
# Test display not called if there is a running task
tab.is_selected = False
tab.task = MagicMock()
tab.task.done.return_value = False
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": False})
self.assertTrue(tab.is_selected)
self.assertTrue(tab.tab_class.display.call_count == 1)
@patch("hummingbot.client.ui.layout.Layout")
@patch("hummingbot.client.ui.layout.FloatContainer")
@patch("hummingbot.client.ui.layout.ConditionalContainer")
@patch("hummingbot.client.ui.layout.Box")
@patch("hummingbot.client.ui.layout.HSplit")
@patch("hummingbot.client.ui.layout.VSplit")
def test_tab_navigation(self, mock_vsplit, mock_hsplit, mock_box, moc_cc, moc_fc, mock_layout):
tab2 = CommandTab("command_2", None, None, None, MagicMock(), False)
self.app.command_tabs["command_2"] = tab2
tab1 = self.app.command_tabs[self.command_name]
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": False})
self.app.handle_tab_command(self.mock_hb, "command_2", {"close": False})
self.assertTrue(tab2.is_selected)
self.app.tab_navigate_left()
self.assertTrue(tab1.is_selected)
self.assertFalse(tab2.is_selected)
self.app.tab_navigate_left()
self.assertTrue(all(not t.is_selected for t in self.app.command_tabs.values()))
self.app.tab_navigate_left()
self.assertTrue(all(not t.is_selected for t in self.app.command_tabs.values()))
self.app.tab_navigate_right()
self.assertTrue(tab1.is_selected)
self.app.tab_navigate_right()
self.assertFalse(tab1.is_selected)
self.assertTrue(tab2.is_selected)
self.app.tab_navigate_right()
self.assertFalse(tab1.is_selected)
self.assertTrue(tab2.is_selected)
@patch("hummingbot.client.ui.hummingbot_cli.init_logging")
def test_did_start_ui(self, mock_init_logging: MagicMock):
class UIStartHandler(EventListener):
def __init__(self):
super().__init__()
self.mock = MagicMock()
def __call__(self, _):
self.mock()
handler: UIStartHandler = UIStartHandler()
self.app.add_listener(HummingbotUIEvent.Start, handler)
self.app.did_start_ui()
mock_init_logging.assert_called()
handler.mock.assert_called() | null |
893 | # Copyright (c) ZenML GmbH 2022. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Entrypoint of the Kubernetes master/orchestrator pod."""
import argparse
import socket
from kubernetes import client as k8s_client
from zenml.client import Client
from zenml.entrypoints.step_entrypoint_configuration import (
StepEntrypointConfiguration,
)
from zenml.integrations.kubernetes.flavors.kubernetes_orchestrator_flavor import (
KubernetesOrchestratorSettings,
)
from zenml.integrations.kubernetes.orchestrators import kube_utils
from zenml.integrations.kubernetes.orchestrators.kubernetes_orchestrator import (
ENV_ZENML_KUBERNETES_RUN_ID,
KubernetesOrchestrator,
)
from zenml.integrations.kubernetes.orchestrators.manifest_utils import (
build_pod_manifest,
)
from zenml.logger import get_logger
from zenml.orchestrators.dag_runner import ThreadedDagRunner
from zenml.orchestrators.utils import get_config_environment_vars
logger = get_logger(__name__)
def parse_args() -> argparse.Namespace:
"""Parse entrypoint arguments.
Returns:
Parsed args.
"""
parser = argparse.ArgumentParser()
parser.add_argument("--run_name", type=str, required=True)
parser.add_argument("--deployment_id", type=str, required=True)
parser.add_argument("--kubernetes_namespace", type=str, required=True)
return parser.parse_args()
def main() -> None:
"""Entrypoint of the k8s master/orchestrator pod."""
# Log to the container's stdout so it can be streamed by the client.
logger.info("Kubernetes orchestrator pod started.")
# Parse / extract args.
args = parse_args()
orchestrator_run_id = socket.gethostname()
deployment_config = Client().get_deployment(args.deployment_id)
pipeline_dag = {
step_name: step.spec.upstream_steps
for step_name, step in deployment_config.step_configurations.items()
}
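    # pipeline_dag maps each step to the names of its upstream steps, e.g.
    # {"trainer": ["loader"], "evaluator": ["trainer"]}; ThreadedDagRunner uses
    # this mapping to launch step pods in dependency order.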
step_command = StepEntrypointConfiguration.get_entrypoint_command()
active_stack = Client().active_stack
mount_local_stores = active_stack.orchestrator.config.is_local
# Get a Kubernetes client from the active Kubernetes orchestrator, but
# override the `incluster` setting to `True` since we are running inside
# the Kubernetes cluster.
orchestrator = active_stack.orchestrator
assert isinstance(orchestrator, KubernetesOrchestrator)
kube_client = orchestrator.get_kube_client(incluster=True)
core_api = k8s_client.CoreV1Api(kube_client)
    def run_step_on_kubernetes(step_name: str) -> None:
"""Run a pipeline step in a separate Kubernetes pod.
Args:
step_name: Name of the step.
"""
# Define Kubernetes pod name.
pod_name = f"{orchestrator_run_id}-{step_name}"
pod_name = kube_utils.sanitize_pod_name(pod_name)
image = KubernetesOrchestrator.get_image(
deployment=deployment_config, step_name=step_name
)
step_args = StepEntrypointConfiguration.get_entrypoint_arguments(
step_name=step_name, deployment_id=deployment_config.id
)
step_config = deployment_config.step_configurations[step_name].config
settings = KubernetesOrchestratorSettings.parse_obj(
step_config.settings.get("orchestrator.kubernetes", {})
)
env = get_config_environment_vars()
env[ENV_ZENML_KUBERNETES_RUN_ID] = orchestrator_run_id
# Define Kubernetes pod manifest.
pod_manifest = build_pod_manifest(
pod_name=pod_name,
run_name=args.run_name,
pipeline_name=deployment_config.pipeline_configuration.name,
image_name=image,
command=step_command,
args=step_args,
env=env,
settings=settings,
service_account_name=settings.service_account_name,
mount_local_stores=mount_local_stores,
)
# Create and run pod.
core_api.create_namespaced_pod(
namespace=args.kubernetes_namespace,
body=pod_manifest,
)
# Wait for pod to finish.
logger.info(f"Waiting for pod of step `{step_name}` to start...")
kube_utils.wait_pod(
core_api=core_api,
pod_name=pod_name,
namespace=args.kubernetes_namespace,
exit_condition_lambda=kube_utils.pod_is_done,
stream_logs=True,
)
logger.info(f"Pod of step `{step_name}` completed.")
    ThreadedDagRunner(dag=pipeline_dag, run_fn=run_step_on_kubernetes).run()
logger.info("Orchestration pod completed.")
if __name__ == "__main__":
main() | null |
894 | # Copyright 2018,2019,2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from six.moves import range
import os
import nnabla as nn
import nnabla.logger as logger
import nnabla.functions as F
import nnabla.parametric_functions as PF
import nnabla.solvers as S
import nnabla.utils.save as save
def categorical_error(pred, label):
"""
Compute categorical error given score vectors and labels as
numpy.ndarray.
"""
pred_label = pred.argmax(1)
return (pred_label != label.flat).mean()
def cifar10_resnet23_prediction(image, net="teacher", maps=64,
test=False):
"""
Construct ResNet 23
"""
# Residual Unit
    def res_unit(x, scope_name, dn=False):
C = x.shape[1]
with nn.parameter_scope(scope_name):
# Conv -> BN -> Relu
with nn.parameter_scope("conv1"):
                h = PF.convolution(x, C // 2, kernel=(1, 1), pad=(0, 0),
with_bias=False)
h = PF.batch_normalization(h, batch_stat=not test)
h = F.relu(h)
# Conv -> BN -> Relu
with nn.parameter_scope("conv2"):
                h = PF.convolution(h, C // 2, kernel=(3, 3), pad=(1, 1),
with_bias=False)
h = PF.batch_normalization(h, batch_stat=not test)
h = F.relu(h)
# Conv -> BN
with nn.parameter_scope("conv3"):
h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0),
with_bias=False)
h = PF.batch_normalization(h, batch_stat=not test)
# Residual -> Relu
h = F.relu(h + x)
# Maxpooling
if dn:
h = F.max_pooling(h, kernel=(2, 2), stride=(2, 2))
return h
ncls = 10
with nn.parameter_scope(net):
# Conv -> BN -> Relu
with nn.parameter_scope("conv1"):
# Preprocess
image /= 255.0
if not test:
image = F.image_augmentation(image, contrast=1.0,
angle=0.25,
flip_lr=True)
image.need_grad = False
h = PF.convolution(image, maps, kernel=(3, 3), pad=(1, 1),
with_bias=False)
h = PF.batch_normalization(h, batch_stat=not test)
h = F.relu(h)
h = METHOD_NAME(h, "conv2", False) # -> 32x32
h = METHOD_NAME(h, "conv3", True) # -> 16x16
h = METHOD_NAME(h, "conv4", False) # -> 16x16
h = METHOD_NAME(h, "conv5", True) # -> 8x8
h = METHOD_NAME(h, "conv6", False) # -> 8x8
h = METHOD_NAME(h, "conv7", True) # -> 4x4
h = METHOD_NAME(h, "conv8", False) # -> 4x4
h = F.average_pooling(h, kernel=(4, 4)) # -> 1x1
pred = PF.affine(h, ncls)
return pred
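# A minimal usage sketch for the graph above; the batch size, scope name, and
# loss choice are assumptions for illustration only.
def example_resnet23_training_graph():
    image = nn.Variable((64, 3, 32, 32))  # CIFAR-10 sized input batch
    label = nn.Variable((64, 1))
    pred = cifar10_resnet23_prediction(image, net="teacher", maps=64, test=False)
    loss = F.mean(F.softmax_cross_entropy(pred, label))
    return image, label, pred, loss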
def cifar10_shuffle_prediction(image, maps=64, groups=1, test=False):
"""
Construct ShuffleNet
"""
def shuffle(x):
n, c, h, w = x.shape
g = groups
assert c % g == 0
# N, C, H, W -> N, g, C/g, H, W -> N, C/g, g, H, W -> N, C, H, W
x = F.reshape(x, [n, g, c // g, h, w])
x = F.transpose(x, [0, 2, 1, 3, 4])
x = F.reshape(x, [n, c, h, w])
return x
# Shuffle
def shuffle_unit(x, scope_name, dn=False):
"""
Figure. 2 (b) and (c) in https://arxiv.org/pdf/1707.01083.pdf
"""
C = x.shape[1]
h = x
with nn.parameter_scope(scope_name):
with nn.parameter_scope("gconv1"):
h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0),
group=groups,
with_bias=False)
h = PF.batch_normalization(h, batch_stat=not test)
h = F.relu(h, True)
with nn.parameter_scope("shuffle"): # no meaning but semantics
h = shuffle(h)
with nn.parameter_scope("dconv"):
stride = (2, 2) if dn else (1, 1)
h = PF.depthwise_convolution(h, kernel=(3, 3), pad=(1, 1),
stride=stride,
with_bias=False)
h = PF.batch_normalization(h, batch_stat=not test)
with nn.parameter_scope("gconv2"):
h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0),
group=groups,
with_bias=False)
h = PF.batch_normalization(h, batch_stat=not test)
s = F.average_pooling(x, (2, 2)) if dn else x
            h = F.concatenate(h, s, axis=1) if dn else h + s
h = F.relu(h)
return h
ncls = 10
# Conv -> BN -> Relu
with nn.parameter_scope("conv1"):
# Preprocess
image /= 255.0
if not test:
image = F.image_augmentation(image, contrast=1.0,
angle=0.25,
flip_lr=True)
image.need_grad = False
h = PF.convolution(image, maps, kernel=(3, 3), pad=(1, 1),
with_bias=False)
h = PF.batch_normalization(h, batch_stat=not test)
h = F.relu(h)
h = shuffle_unit(h, "conv2", False) # -> 32x32
h = shuffle_unit(h, "conv3", True) # -> 16x16
h = shuffle_unit(h, "conv4", False) # -> 16x16
h = shuffle_unit(h, "conv5", True) # -> 8x8
h = shuffle_unit(h, "conv6", False) # -> 8x8
h = shuffle_unit(h, "conv7", True) # -> 4x4
h = shuffle_unit(h, "conv8", False) # -> 4x4
h = F.average_pooling(h, kernel=(4, 4)) # -> 1x1
pred = PF.affine(h, ncls)
return pred | null |
895 | import os
import sys
from os.path import dirname, join, realpath
from typing import Type
from prompt_toolkit.shortcuts import input_dialog, message_dialog
from prompt_toolkit.styles import Style
from hummingbot import root_path
from hummingbot.client.config.conf_migration import migrate_configs, migrate_non_secure_configs_only
from hummingbot.client.config.config_crypt import BaseSecretsManager, store_password_verification
from hummingbot.client.config.security import Security
from hummingbot.client.settings import CONF_DIR_PATH
sys.path.insert(0, str(root_path()))
with open(realpath(join(dirname(__file__), '../../VERSION'))) as version_file:
version = version_file.read().strip()
def login_prompt(secrets_manager_cls: Type[BaseSecretsManager], style: Style):
err_msg = None
secrets_manager = None
if Security.new_password_required():
if legacy_confs_exist():
secrets_manager = migrate_configs_prompt(secrets_manager_cls, style)
else:
METHOD_NAME(style)
password = input_dialog(
title="Set Password",
text="""
Create a password to protect your sensitive data.
    This password is not shared with us or with anyone else, so please store it securely.
Enter your new password:""",
password=True,
style=style).run()
if password is None:
return None
            if password == "":
err_msg = "The password must not be empty."
else:
re_password = input_dialog(
title="Set Password",
text="Please re-enter your password:",
password=True,
style=style).run()
if re_password is None:
return None
if password != re_password:
err_msg = "Passwords entered do not match, please try again."
else:
secrets_manager = secrets_manager_cls(password)
store_password_verification(secrets_manager)
else:
password = input_dialog(
title="Welcome back to Hummingbot",
text="Enter your password:",
password=True,
style=style).run()
if password is None:
return None
secrets_manager = secrets_manager_cls(password)
if err_msg is None and not Security.login(secrets_manager):
err_msg = "Invalid password - please try again."
if err_msg is not None:
message_dialog(
title='Error',
text=err_msg,
style=style).run()
return login_prompt(secrets_manager_cls, style)
return secrets_manager
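# Minimal invocation sketch (illustrative; `MySecretsManager` is a hypothetical
# BaseSecretsManager subclass and the style dict is an assumption):
#
#   from prompt_toolkit.styles import Style
#   style = Style.from_dict({"dialog": "bg:#171E2B"})
#   secrets_manager = login_prompt(MySecretsManager, style)
#   if secrets_manager is None:
#       sys.exit(0)  # the user cancelled one of the password dialogs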
def legacy_confs_exist() -> bool:
encrypted_conf_prefix = "encrypted_"
encrypted_conf_postfix = ".json"
exist = False
for f in sorted(os.listdir(CONF_DIR_PATH)):
f_path = CONF_DIR_PATH / f
if os.path.isfile(f_path) and f.startswith(encrypted_conf_prefix) and f.endswith(encrypted_conf_postfix):
exist = True
break
return exist
def migrate_configs_prompt(secrets_manager_cls: Type[BaseSecretsManager], style: Style) -> BaseSecretsManager:
message_dialog(
title='Configs Migration',
text="""
CONFIGS MIGRATION:
We have recently refactored the way hummingbot handles configurations.
To migrate your legacy configuration files to the new format,
please enter your password on the following screen.
""",
style=style).run()
password = input_dialog(
title="Input Password",
text="\n\nEnter your previous password:",
password=True,
style=style).run()
if password is None:
raise ValueError("Wrong password.")
secrets_manager = secrets_manager_cls(password)
errors = migrate_configs(secrets_manager)
if len(errors) != 0:
_migration_errors_dialog(errors, style)
else:
message_dialog(
title='Configs Migration Success',
text="""
CONFIGS MIGRATION SUCCESS:
The migration process was completed successfully.
""",
style=style).run()
return secrets_manager
def migrate_non_secure_only_prompt(style: Style):
message_dialog(
title='Configs Migration',
text="""
CONFIGS MIGRATION:
We have recently refactored the way hummingbot handles configurations.
We will now attempt to migrate any legacy config files to the new format.
""",
style=style).run()
errors = migrate_non_secure_configs_only()
if len(errors) != 0:
_migration_errors_dialog(errors, style)
else:
message_dialog(
title='Configs Migration Success',
text="""
CONFIGS MIGRATION SUCCESS:
The migration process was completed successfully.
""",
style=style).run()
def _migration_errors_dialog(errors, style: Style):
padding = "\n "
errors_str = padding + padding.join(errors)
message_dialog(
title='Configs Migration Errors',
text=f"""
CONFIGS MIGRATION ERRORS:
{errors_str}
""",
style=style).run()
def METHOD_NAME(style: Style):
message_dialog(
title='Welcome to Hummingbot',
text="""
██╗ ██╗██╗ ██╗███╗ ███╗███╗ ███╗██╗███╗ ██╗ ██████╗ ██████╗ ██████╗ ████████╗
██║ ██║██║ ██║████╗ ████║████╗ ████║██║████╗ ██║██╔════╝ ██╔══██╗██╔═══██╗╚══██╔══╝
███████║██║ ██║██╔████╔██║██╔████╔██║██║██╔██╗ ██║██║ ███╗██████╔╝██║ ██║ ██║
██╔══██║██║ ██║██║╚██╔╝██║██║╚██╔╝██║██║██║╚██╗██║██║ ██║██╔══██╗██║ ██║ ██║
██║ ██║╚██████╔╝██║ ╚═╝ ██║██║ ╚═╝ ██║██║██║ ╚████║╚██████╔╝██████╔╝╚██████╔╝ ██║
╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═════╝ ╚═════╝ ╚═════╝ ╚═╝
=======================================================================================
Version: {version}
Codebase: https://github.com/hummingbot/hummingbot
""".format(version=version),
style=style).run()
message_dialog(
title='Important Warning',
text="""
PLEASE READ THIS CAREFULLY BEFORE USING HUMMINGBOT:
Hummingbot is a free and open source software client that helps you build algorithmic
crypto trading strategies.
Algorithmic crypto trading is a risky activity. You will be building a "bot" that
automatically places orders and trades based on parameters that you set. Please take
the time to understand how each strategy works before you risk real capital with it.
You are solely responsible for the trades that you perform using Hummingbot.
""",
style=style).run()
message_dialog(
title='Important Warning',
text="""
SET A SECURE PASSWORD:
To use Hummingbot, you will need to give it access to your crypto assets by entering
your exchange API keys and/or wallet private keys. These keys are not shared with
anyone, including us.
On the next screen, you will set a password to protect these keys and other sensitive
data. Please store this password safely since there is no way to reset it.
""",
style=style).run() | null |
896 | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Tests for KernelSpecCache."""
import asyncio
import json
import os
import shutil
import sys
import jupyter_core.paths
import pytest
from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel
from enterprise_gateway.services.kernelspecs import KernelSpecCache
# BEGIN - Remove once transition to jupyter_server occurs
def mkdir(tmp_path, *parts):
path = tmp_path.joinpath(*parts)
if not path.exists():
path.mkdir(parents=True)
return path
home_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "home"))
data_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "data"))
config_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "config"))
runtime_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "runtime"))
system_jupyter_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "share", "jupyter"))
env_jupyter_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "env", "share", "jupyter"))
system_config_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "etc", "jupyter"))
env_config_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "env", "etc", "jupyter"))
@pytest.fixture
def environ(
monkeypatch,
tmp_path,
home_dir,
data_dir,
config_dir,
runtime_dir,
system_jupyter_path,
system_config_path,
env_jupyter_path,
env_config_path,
):
monkeypatch.setenv("HOME", str(home_dir))
monkeypatch.setenv("PYTHONPATH", os.pathsep.join(sys.path))
monkeypatch.setenv("JUPYTER_NO_CONFIG", "1")
monkeypatch.setenv("JUPYTER_CONFIG_DIR", str(config_dir))
monkeypatch.setenv("JUPYTER_DATA_DIR", str(data_dir))
monkeypatch.setenv("JUPYTER_RUNTIME_DIR", str(runtime_dir))
monkeypatch.setattr(jupyter_core.paths, "SYSTEM_JUPYTER_PATH", [str(system_jupyter_path)])
monkeypatch.setattr(jupyter_core.paths, "ENV_JUPYTER_PATH", [str(env_jupyter_path)])
monkeypatch.setattr(jupyter_core.paths, "SYSTEM_CONFIG_PATH", [str(system_config_path)])
monkeypatch.setattr(jupyter_core.paths, "ENV_CONFIG_PATH", [str(env_config_path)])
# END - Remove once transition to jupyter_server occurs
kernelspec_json = {
"argv": ["cat", "{connection_file}"],
"display_name": "Test kernel: {kernel_name}",
}
def _install_kernelspec(kernels_dir, kernel_name):
"""install a sample kernel in a kernels directory"""
kernelspec_dir = os.path.join(kernels_dir, kernel_name)
os.makedirs(kernelspec_dir)
json_file = os.path.join(kernelspec_dir, "kernel.json")
named_json = kernelspec_json.copy()
named_json["display_name"] = named_json["display_name"].format(kernel_name=kernel_name)
with open(json_file, "w") as f:
json.dump(named_json, f)
return kernelspec_dir
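# For example (path shown for illustration only), _install_kernelspec("/tmp/kernels", "test1")
# creates /tmp/kernels/test1/kernel.json containing:
#   {"argv": ["cat", "{connection_file}"], "display_name": "Test kernel: test1"}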
def METHOD_NAME(kernelspec_dir, kernel_name):
json_file = os.path.join(kernelspec_dir, "kernel.json")
kernel_json = kernelspec_json.copy()
kernel_json["display_name"] = f"{kernel_name} modified!"
with open(json_file, "w") as f:
json.dump(kernel_json, f)
kernelspec_location = pytest.fixture(lambda data_dir: mkdir(data_dir, "kernels"))
other_kernelspec_location = pytest.fixture(
lambda env_jupyter_path: mkdir(env_jupyter_path, "kernels")
)
@pytest.fixture
def setup_kernelspecs(environ, kernelspec_location):
    # Install three kernelspecs into the default (data-dir) kernels location
_install_kernelspec(str(kernelspec_location), "test1")
_install_kernelspec(str(kernelspec_location), "test2")
_install_kernelspec(str(kernelspec_location), "test3")
@pytest.fixture
def kernel_spec_manager(environ, setup_kernelspecs):
yield KernelSpecManager(ensure_native_kernel=False)
@pytest.fixture
def kernel_spec_cache(is_enabled, kernel_spec_manager):
kspec_cache = KernelSpecCache.instance(
kernel_spec_manager=kernel_spec_manager, cache_enabled=is_enabled
)
yield kspec_cache
kspec_cache = None
KernelSpecCache.clear_instance()
@pytest.fixture(params=[False, True]) # Add types as needed
def is_enabled(request):
return request.param
async def tests_get_all_specs(kernel_spec_cache):
kspecs = await kernel_spec_cache.get_all_specs()
assert len(kspecs) == 3
async def tests_get_named_spec(kernel_spec_cache):
kspec = await kernel_spec_cache.get_kernel_spec("test2")
assert kspec.display_name == "Test kernel: test2"
async def tests_get_modified_spec(kernel_spec_cache):
kspec = await kernel_spec_cache.get_kernel_spec("test2")
assert kspec.display_name == "Test kernel: test2"
# Modify entry
METHOD_NAME(kspec.resource_dir, "test2")
await asyncio.sleep(0.5) # sleep for a half-second to allow cache to update item
kspec = await kernel_spec_cache.get_kernel_spec("test2")
assert kspec.display_name == "test2 modified!"
async def tests_add_spec(kernel_spec_cache, kernelspec_location, other_kernelspec_location):
assert len(kernel_spec_cache.observed_dirs) == (1 if kernel_spec_cache.cache_enabled else 0)
assert (
str(kernelspec_location) in kernel_spec_cache.observed_dirs
if kernel_spec_cache.cache_enabled
else True
)
_install_kernelspec(str(other_kernelspec_location), "added")
kspec = await kernel_spec_cache.get_kernel_spec("added")
# Ensure new location has been added to observed_dirs
assert len(kernel_spec_cache.observed_dirs) == (2 if kernel_spec_cache.cache_enabled else 0)
assert (
str(other_kernelspec_location) in kernel_spec_cache.observed_dirs
if kernel_spec_cache.cache_enabled
else True
)
assert kspec.display_name == "Test kernel: added"
assert kernel_spec_cache.cache_misses == (1 if kernel_spec_cache.cache_enabled else 0)
# Add another to an existing observed directory, no cache miss here
_install_kernelspec(str(kernelspec_location), "added2")
    await asyncio.sleep(0.5)  # sleep for a half-second to allow cache to add item (no cache miss in this case)
kspec = await kernel_spec_cache.get_kernel_spec("added2")
assert kspec.display_name == "Test kernel: added2"
assert kernel_spec_cache.cache_misses == (1 if kernel_spec_cache.cache_enabled else 0)
async def tests_remove_spec(kernel_spec_cache):
kspec = await kernel_spec_cache.get_kernel_spec("test2")
assert kspec.display_name == "Test kernel: test2"
assert kernel_spec_cache.cache_misses == 0
shutil.rmtree(kspec.resource_dir)
await asyncio.sleep(0.5) # sleep for a half-second to allow cache to remove item
with pytest.raises(NoSuchKernel):
await kernel_spec_cache.get_kernel_spec("test2")
assert kernel_spec_cache.cache_misses == (1 if kernel_spec_cache.cache_enabled else 0)
async def tests_get_missing(kernel_spec_cache):
with pytest.raises(NoSuchKernel):
await kernel_spec_cache.get_kernel_spec("missing")
assert kernel_spec_cache.cache_misses == (1 if kernel_spec_cache.cache_enabled else 0) | null |
897 | # GemRB - Infinity Engine Emulator
# Copyright (C) 2003-2005 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Start.py - intro and main menu screens
###################################################
import GemRB
import GameCheck
from GUIDefines import *
from ie_restype import *
StartWindow = None
JoinGameButton = 0
def OnLoad ():
global JoinGameButton
GemRB.SetVar("ExpansionGame", 0)
skip_videos = GemRB.GetVar ("SkipIntroVideos")
if not skip_videos:
GemRB.PlayMovie ("BISLOGO", 1)
if GameCheck.HasHOW():
GemRB.PlayMovie ("WOTC", 1)
else:
GemRB.PlayMovie ("TSRLOGO", 1)
GemRB.PlayMovie("INTRO", 1)
GemRB.SetVar ("SkipIntroVideos", 1)
if GameCheck.HasHOW():
GemRB.SetMasterScript("BALDUR","WORLDMAP","EXPMAP")
else:
GemRB.SetMasterScript("BALDUR","WORLDMAP")
#main window
global StartWindow
StartWindow = GemRB.LoadWindow (0, "GUICONN")
ProtocolButton = StartWindow.GetControl (0x00)
CreateGameButton = StartWindow.GetControl (0x02)
LoadGameButton = StartWindow.GetControl (0x07)
JoinGameButton = StartWindow.GetControl (0x03)
MoviesButton = StartWindow.GetControl (0x08)
QuitGameButton = StartWindow.GetControl (0x01)
VersionLabel = StartWindow.CreateLabel (0x0fff0000, 0, 0, 640, 30, "REALMS2", "", IE_FONT_SINGLE_LINE | IE_FONT_ALIGN_CENTER)
VersionLabel.SetText (GemRB.Version)
VersionLabel.SetColor ({'r' : 255, 'g' : 255, 'b' : 255})
ProtocolButton.SetStatus (IE_GUI_BUTTON_ENABLED)
CreateGameButton.SetStatus (IE_GUI_BUTTON_ENABLED)
LoadGameButton.SetStatus (IE_GUI_BUTTON_ENABLED)
MoviesButton.SetStatus (IE_GUI_BUTTON_ENABLED)
QuitGameButton.SetStatus (IE_GUI_BUTTON_ENABLED)
LastProtocol = GemRB.GetVar ("Last Protocol Used")
if LastProtocol == 0:
ProtocolButton.SetText (15413)
JoinGameButton.SetStatus (IE_GUI_BUTTON_DISABLED)
elif LastProtocol == 1:
ProtocolButton.SetText (13967)
JoinGameButton.SetStatus (IE_GUI_BUTTON_ENABLED)
elif LastProtocol == 2:
ProtocolButton.SetText (13968)
JoinGameButton.SetStatus (IE_GUI_BUTTON_ENABLED)
CreateGameButton.SetText (13963)
LoadGameButton.SetText (13729)
JoinGameButton.SetText (13964)
MoviesButton.SetText (15415)
QuitGameButton.SetText (13731)
QuitGameButton.OnPress (QuitPress)
QuitGameButton.MakeEscape ()
ProtocolButton.OnPress (ProtocolPress)
MoviesButton.OnPress (MoviesPress)
LoadGameButton.OnPress (LoadPress)
CreateGameButton.OnPress (CreatePress)
StartWindow.Focus ()
GemRB.LoadMusicPL("Theme.mus",1)
StartWindow.SetAction(RefreshProtocol, ACTION_WINDOW_FOCUS_GAINED)
return
def ProtocolPress ():
ProtocolWindow = GemRB.LoadWindow (1, "GUICONN")
	# Disable unused buttons in this window
Button = ProtocolWindow.GetControl (2)
Button.SetState (IE_GUI_BUTTON_DISABLED)
Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
Button = ProtocolWindow.GetControl (3)
Button.SetState (IE_GUI_BUTTON_DISABLED)
Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
Button = ProtocolWindow.GetControl (9)
Button.SetState (IE_GUI_BUTTON_DISABLED)
Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
SinglePlayerButton = ProtocolWindow.GetControl (10)
SinglePlayerButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON,OP_OR)
SinglePlayerButton.SetText (15413)
IPXButton = ProtocolWindow.GetControl (0)
IPXButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON,OP_OR)
IPXButton.SetText (13967)
TCPIPButton = ProtocolWindow.GetControl (1)
TCPIPButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON,OP_OR)
TCPIPButton.SetText (13968)
SinglePlayerButton.SetVarAssoc ("Last Protocol Used", 0)
IPXButton.SetVarAssoc ("Last Protocol Used", 1)
TCPIPButton.SetVarAssoc ("Last Protocol Used", 2)
TextArea = ProtocolWindow.GetControl (7)
TextArea.SetText (11316)
DoneButton = ProtocolWindow.GetControl (6)
DoneButton.SetText (11973)
DoneButton.OnPress (ProtocolWindow.Close)
DoneButton.MakeEscape()
ProtocolWindow.ShowModal (1)
return
def RefreshProtocol (win):
ProtocolButton = win.GetControl (0)
LastProtocol = GemRB.GetVar ("Last Protocol Used")
if LastProtocol == 0:
ProtocolButton.SetText (15413)
JoinGameButton.SetStatus (IE_GUI_BUTTON_DISABLED)
elif LastProtocol == 1:
ProtocolButton.SetText (13967)
JoinGameButton.SetStatus (IE_GUI_BUTTON_ENABLED)
elif LastProtocol == 2:
ProtocolButton.SetText (13968)
JoinGameButton.SetStatus (IE_GUI_BUTTON_ENABLED)
return
def CreatePress ():
global GameTypeWindow, ExpansionType
if not GameCheck.HasHOW():
		GameTypeReallyDonePress()
		return
GameTypeWindow = GemRB.LoadWindow (24, "GUICONN")
CancelButton = GameTypeWindow.GetControl (1)
CancelButton.SetText (13727)
CancelButton.OnPress (GameTypeWindow.Close)
CancelButton.MakeEscape()
DoneButton = GameTypeWindow.GetControl (2)
DoneButton.SetText (11973)
DoneButton.OnPress (lambda: GameTypeDonePress(GameTypeWindow))
DoneButton.MakeDefault()
FullGameButton = GameTypeWindow.GetControl (4)
FullGameButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON,OP_OR)
FullGameButton.OnPress (lambda btn: METHOD_NAME (btn, False))
FullGameButton.SetText (24869)
ExpansionGameButton = GameTypeWindow.GetControl (5)
ExpansionGameButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON,OP_OR)
ExpansionGameButton.OnPress (lambda btn: METHOD_NAME (btn, True))
ExpansionGameButton.SetText (24871)
ExpansionGameButton.SetVarAssoc ("ExpansionGame", 1)
FullGameButton.SetVarAssoc ("ExpansionGame", 0)
METHOD_NAME (None, False)
GameTypeWindow.ShowModal (1)
return
def METHOD_NAME(btn, isExpansion):
GameTypeTextArea = GameTypeWindow.GetControl(3)
if isExpansion:
GameTypeTextArea.SetText (24872)
else:
GameTypeTextArea.SetText (24870)
return
def GameTypeDonePress(win):
#todo: importing team members from final save (string 26317)?
ExpansionGame = GemRB.GetVar("ExpansionGame")
if ExpansionGame == 0: #start in Easthaven
win.Close()
GameTypeReallyDonePress()
elif ExpansionGame == 1: #show a warning message first
GameType2Window = GemRB.LoadWindow (25)
TextArea = GameType2Window.GetControl(0)
TextArea.SetText(26318)
def confirm():
win.Close()
GameType2Window.Close()
GameTypeReallyDonePress()
YesButton = GameType2Window.GetControl (1)
YesButton.SetText (13912)
YesButton.OnPress (confirm)
NoButton = GameType2Window.GetControl (2)
NoButton.SetText (13913)
NoButton.OnPress (GameType2Window.Close)
NoButton.MakeEscape()
def cancel():
			win.Close()
GameType2Window.Close()
CancelButton = GameType2Window.GetControl (3)
CancelButton.SetText (13727)
CancelButton.OnPress (cancel)
CancelButton.MakeEscape()
GameType2Window.ShowModal(1)
def GameTypeReallyDonePress():
StartWindow.Close()
GemRB.LoadGame(None)
GemRB.SetNextScript ("PartyFormation")
def LoadPress ():
GemRB.SetNextScript ("GUILOAD")
return
def MoviesPress ():
GemRB.SetNextScript ("GUIMOVIE")
return
def QuitPress ():
QuitWindow = GemRB.LoadWindow (22, "GUICONN")
CancelButton = QuitWindow.GetControl (2)
CancelButton.SetText (13727)
CancelButton.OnPress (QuitWindow.Close)
CancelButton.MakeEscape()
QuitButton = QuitWindow.GetControl (1)
QuitButton.SetText (15417)
QuitButton.OnPress (lambda: GemRB.Quit())
QuitButton.MakeDefault()
TextArea = QuitWindow.GetControl (0)
TextArea.SetText (19532)
QuitWindow.ShowModal (1)
return | null |
898 | ## @file
# Module that encodes and decodes a EFI_CAPSULE_HEADER with a payload
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
UefiCapsuleHeader
'''
import struct
import uuid
class UefiCapsuleHeaderClass (object):
# typedef struct {
# ///
# /// A GUID that defines the contents of a capsule.
# ///
# EFI_GUID CapsuleGuid;
# ///
# /// The size of the capsule header. This may be larger than the size of
# /// the EFI_CAPSULE_HEADER since CapsuleGuid may imply
# /// extended header entries
# ///
# UINT32 HeaderSize;
# ///
# /// Bit-mapped list describing the capsule attributes. The Flag values
# /// of 0x0000 - 0xFFFF are defined by CapsuleGuid. Flag values
# /// of 0x10000 - 0xFFFFFFFF are defined by this specification
# ///
# UINT32 Flags;
# ///
# /// Size in bytes of the capsule.
# ///
# UINT32 CapsuleImageSize;
# } EFI_CAPSULE_HEADER;
#
# #define CAPSULE_FLAGS_PERSIST_ACROSS_RESET 0x00010000
# #define CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE 0x00020000
# #define CAPSULE_FLAGS_INITIATE_RESET 0x00040000
#
_StructFormat = '<16sIIII'
_StructSize = struct.calcsize (_StructFormat)
EFI_FIRMWARE_MANAGEMENT_CAPSULE_ID_GUID = uuid.UUID ('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A')
_CAPSULE_FLAGS_PERSIST_ACROSS_RESET = 0x00010000
_CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE = 0x00020000
_CAPSULE_FLAGS_INITIATE_RESET = 0x00040000
def __init__ (self):
self._Valid = False
self.CapsuleGuid = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_ID_GUID
self.HeaderSize = self._StructSize
self.OemFlags = 0x0000
self.PersistAcrossReset = False
self.PopulateSystemTable = False
self.InitiateReset = False
self.CapsuleImageSize = self.HeaderSize
self.Payload = b''
def Encode (self):
Flags = self.OemFlags
if self.PersistAcrossReset:
Flags = Flags | self._CAPSULE_FLAGS_PERSIST_ACROSS_RESET
if self.PopulateSystemTable:
Flags = Flags | self._CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE
if self.InitiateReset:
Flags = Flags | self._CAPSULE_FLAGS_INITIATE_RESET
self.CapsuleImageSize = self.HeaderSize + len (self.Payload)
UefiCapsuleHeader = struct.pack (
self._StructFormat,
self.CapsuleGuid.bytes_le,
self.HeaderSize,
Flags,
self.CapsuleImageSize,
0
)
self._Valid = True
return UefiCapsuleHeader + self.Payload
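    # Round-trip usage sketch (illustrative only; the payload bytes are a
    # hypothetical example):
    #
    #   Header = UefiCapsuleHeaderClass ()
    #   Header.PersistAcrossReset = True
    #   Header.Payload = b'example payload'
    #   Blob = Header.Encode ()   # 32-byte header followed by the payload
    #   Header.DumpInfo ()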
def METHOD_NAME (self, Buffer):
if len (Buffer) < self._StructSize:
            raise ValueError ('Buffer is smaller than EFI_CAPSULE_HEADER')
(CapsuleGuid, HeaderSize, Flags, CapsuleImageSize, Reserved) = \
struct.unpack (
self._StructFormat,
Buffer[0:self._StructSize]
)
if HeaderSize < self._StructSize:
            raise ValueError ('HeaderSize is smaller than the fixed header size')
if CapsuleImageSize != len (Buffer):
            raise ValueError ('CapsuleImageSize does not match the buffer length')
self.CapsuleGuid = uuid.UUID (bytes_le = CapsuleGuid)
self.HeaderSize = HeaderSize
self.OemFlags = Flags & 0xffff
self.PersistAcrossReset = (Flags & self._CAPSULE_FLAGS_PERSIST_ACROSS_RESET) != 0
self.PopulateSystemTable = (Flags & self._CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE) != 0
self.InitiateReset = (Flags & self._CAPSULE_FLAGS_INITIATE_RESET) != 0
self.CapsuleImageSize = CapsuleImageSize
self.Payload = Buffer[self.HeaderSize:]
self._Valid = True
return self.Payload
def DumpInfo (self):
if not self._Valid:
            raise ValueError ('Header must be encoded or decoded before dumping')
Flags = self.OemFlags
if self.PersistAcrossReset:
Flags = Flags | self._CAPSULE_FLAGS_PERSIST_ACROSS_RESET
if self.PopulateSystemTable:
Flags = Flags | self._CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE
if self.InitiateReset:
Flags = Flags | self._CAPSULE_FLAGS_INITIATE_RESET
print ('EFI_CAPSULE_HEADER.CapsuleGuid = {Guid}'.format (Guid = str(self.CapsuleGuid).upper()))
print ('EFI_CAPSULE_HEADER.HeaderSize = {Size:08X}'.format (Size = self.HeaderSize))
print ('EFI_CAPSULE_HEADER.Flags = {Flags:08X}'.format (Flags = Flags))
print (' OEM Flags = {Flags:04X}'.format (Flags = self.OemFlags))
if self.PersistAcrossReset:
print (' CAPSULE_FLAGS_PERSIST_ACROSS_RESET')
if self.PopulateSystemTable:
print (' CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE')
if self.InitiateReset:
print (' CAPSULE_FLAGS_INITIATE_RESET')
print ('EFI_CAPSULE_HEADER.CapsuleImageSize = {Size:08X}'.format (Size = self.CapsuleImageSize))
print ('sizeof (Payload) = {Size:08X}'.format (Size = len (self.Payload))) | null |
899 | from json import dumps as json_dump
from django.contrib.contenttypes.models import ContentType
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.translation import gettext as _
from creme.creme_config.bricks import EntityFiltersBrick
from creme.creme_core.core.entity_filter import operators
from creme.creme_core.core.entity_filter.condition_handler import (
RegularFieldConditionHandler,
)
from creme.creme_core.models import EntityFilter, FakeContact
from creme.creme_core.tests.base import CremeTestCase, skipIfNotInstalled
from creme.creme_core.tests.views.base import BrickTestCaseMixin
class EntityFilterConfigTestCase(BrickTestCaseMixin, CremeTestCase):
@staticmethod
def _build_add_url(ct):
return reverse('creme_config__create_efilter', args=(ct.id,))
@staticmethod
def METHOD_NAME(efilter):
return reverse('creme_config__edit_efilter', args=(efilter.id,))
@staticmethod
def _build_rfields_data(name, operator, value):
return json_dump([{
'field': {'name': name},
'operator': {'id': str(operator)},
'value': value,
}])
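    # For instance, _build_rfields_data('last_name', operators.IEQUALS, 'Ikari')
    # serialises to:
    #   [{"field": {"name": "last_name"}, "operator": {"id": "<IEQUALS id>"}, "value": "Ikari"}]
    # where the operator id is the numeric constant from creme_core's operators module.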
def _ctype_labels_from_brick(self, response):
brick_node = self.get_brick_node(
self.get_html_tree(response.content), brick=EntityFiltersBrick,
)
return [
ct_group[0].tail.strip()
for ct_group in brick_node.findall(
'.//div[@class="entityfilter-config-group-title"]'
)
]
def test_portal01(self):
"Super-user."
self.login_as_root()
response = self.assertGET200(reverse('creme_config__efilters'))
self.assertTemplateUsed(response, 'creme_config/portals/entity-filter.html')
self.assertEqual(
reverse('creme_core__reload_bricks'),
response.context.get('bricks_reload_url'),
)
ct_labels = self._ctype_labels_from_brick(response)
if len(ct_labels) < EntityFiltersBrick.page_size:
self.assertIn(FakeContact._meta.verbose_name, ct_labels)
@skipIfNotInstalled('creme.documents')
def test_portal02(self):
"Not super-user."
from creme import documents
self.login_as_standard(allowed_apps=('documents',))
response = self.assertGET200(reverse('creme_config__efilters'))
self.assertCountEqual(
self._ctype_labels_from_brick(response),
[
model._meta.verbose_name
for model in (documents.get_document_model(), documents.get_folder_model())
],
)
@override_settings(FILTERS_INITIAL_PRIVATE=False)
def test_create01(self):
"Check app credentials."
user = self.login_as_standard(allowed_apps=('documents',))
ct = ContentType.objects.get_for_model(FakeContact)
uri = self._build_add_url(ct)
self.assertGET403(uri)
role = user.role
role.allowed_apps = ['documents', 'creme_core']
role.save()
response1 = self.assertGET200(uri)
context1 = response1.context
self.assertEqual(
_('Create a filter for «{model}»').format(model='Test Contact'),
context1.get('title'),
)
with self.assertNoException():
form = context1['form']
# NB: difficult to test the content in a robust way (depends on the DB config)
context1['help_message'] # NOQA
self.assertIs(form.initial.get('is_private'), False)
name = 'Filter 01'
operator = operators.IEQUALS
field_name = 'last_name'
value = 'Ikari'
        response2 = self.client.post(
uri,
data={
'name': name,
'use_or': 'False',
'regularfieldcondition': self._build_rfields_data(
operator=operator,
name=field_name,
value=value,
),
},
)
        self.assertNoFormError(response2)
efilter = self.get_object_or_fail(EntityFilter, name=name)
self.assertEqual(ct, efilter.entity_type)
self.assertTrue(efilter.is_custom)
self.assertFalse(efilter.is_private)
self.assertIsNone(efilter.user)
self.assertFalse(efilter.use_or)
condition = self.get_alone_element(efilter.conditions.all())
self.assertEqual(RegularFieldConditionHandler.type_id, condition.type)
self.assertEqual(field_name, condition.name)
self.assertDictEqual(
{'operator': operator, 'values': [value]},
condition.value,
)
@override_settings(FILTERS_INITIAL_PRIVATE=True)
def test_create02(self):
self.login_as_standard()
context = self.assertGET200(
self._build_add_url(ContentType.objects.get_for_model(FakeContact))
).context
with self.assertNoException():
form = context['form']
self.assertIs(form.initial.get('is_private'), True)
def test_edit01(self):
self.login_as_root()
name = 'My filter'
efilter = EntityFilter.objects.smart_update_or_create(
'test-filter', name, FakeContact, is_custom=True,
conditions=[
RegularFieldConditionHandler.build_condition(
model=FakeContact, field_name='first_name',
operator=operators.CONTAINS, values=['Atom'],
),
],
)
url = self.METHOD_NAME(efilter)
context1 = self.assertGET200(url).context
self.assertEqual(
_('Edit «{object}»').format(object=efilter.name),
context1.get('title'),
)
with self.assertNoException():
submit_label = context1['submit_label']
# NB: difficult to test the content in a robust way (depends on the DB config)
context1['help_message'] # NOQA
self.assertEqual(_('Save the filter'), submit_label)
# ---
name += ' (edited)'
field_operator = operators.IEQUALS
field_name = 'last_name'
field_value = 'Ikari'
response2 = self.client.post(
url, follow=True,
data={
'name': name,
'use_or': 'True',
'regularfieldcondition': self._build_rfields_data(
operator=field_operator,
name=field_name,
value=field_value,
),
},
)
self.assertNoFormError(response2)
efilter = self.refresh(efilter)
self.assertEqual(name, efilter.name)
self.assertIs(efilter.is_custom, True)
self.assertIsNone(efilter.user)
condition = self.get_alone_element(efilter.conditions.order_by('id'))
self.assertEqual(RegularFieldConditionHandler.type_id, condition.type)
self.assertEqual(field_name, condition.name)
self.assertDictEqual(
{'operator': field_operator, 'values': [field_value]},
condition.value,
)
def test_edit02(self):
"Can not edit Filter that belongs to another user."
self.login_as_standard(allowed_apps=('creme_core',))
efilter = EntityFilter.objects.smart_update_or_create(
'test-filter01', 'Filter01', FakeContact, user=self.get_root_user(), is_custom=True,
)
self.assertGET403(self.METHOD_NAME(efilter)) | null |