# --- sample 1,100 ---
#
# Copyright (c) 2015 - 2023, Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
#
from geopmdpy.gffi import gffi
from geopmdpy.gffi import get_dl_geopm
from geopmdpy import error
import json
gffi.cdef("""
int geopm_agent_supported(const char *agent_name);
int geopm_agent_num_policy(const char *agent_name,
int *num_policy);
int geopm_agent_policy_name(const char *agent_name,
int policy_idx,
size_t policy_name_max,
char *policy_name);
int geopm_agent_policy_json(const char *agent_name,
const double *policy_array,
size_t json_string_max,
char *json_string);
int geopm_agent_num_sample(const char *agent_name,
int *num_sample);
int geopm_agent_sample_name(const char *agent_name,
int sample_idx,
size_t sample_name_max,
char *sample_name);
int geopm_agent_num_avail(int *num_agent);
int geopm_agent_name(int agent_idx,
size_t agent_name_max,
char *agent_name);
int geopm_agent_enforce_policy(void);
""")
_dl = get_dl_geopm()
_name_max = 1024
_policy_max = 8192
def policy_names(agent_name):
"""Get the names of the policies for a given agent.
Args:
agent_name (str): Name of agent type.
Returns:
list[str]: Policy names required for the agent configuration.
"""
agent_name_cstr = gffi.new("char[]", agent_name.encode())
num_policy = gffi.new("int *")
err = _dl.geopm_agent_num_policy(agent_name_cstr, num_policy)
if err < 0:
raise RuntimeError("geopm_agent_num_policy() failed: {}".format(
error.message(err)))
result = []
for policy_idx in range(num_policy[0]):
buff = gffi.new("char[]", _name_max)
err = _dl.geopm_agent_policy_name(agent_name_cstr, policy_idx, _name_max, buff)
if err < 0:
raise RuntimeError("geopm_agent_policy_name() failed: {}".format(
error.message(err)))
result.append(gffi.string(buff).decode())
return result
def policy_json(agent_name, policy_values):
"""Create a JSON policy for the given agent.
This can be written to a file to control the agent statically.
Args:
agent_name (str): Name of agent type.
policy_values (list[float]): Values to use for each respective policy field.
Returns:
str: JSON str containing a valid policy using the given values.
"""
agent_name_cstr = gffi.new("char[]", agent_name.encode())
policy_array = gffi.new("double[]", policy_values)
json_string = gffi.new("char[]", _policy_max)
err = _dl.geopm_agent_policy_json(agent_name_cstr, policy_array,
_policy_max, json_string)
if err < 0:
raise RuntimeError("geopm_agent_policy_json() failed: {}".format(
error.message(err)))
return gffi.string(json_string).decode()
def sample_names(agent_name):
"""Get all samples produced by the given agent.
Args:
agent_name (str): Name of agent type.
Returns:
list[str]: List of sample names.
"""
agent_name_cstr = gffi.new("char[]", agent_name.encode())
num_sample = gffi.new("int *")
err = _dl.geopm_agent_num_sample(agent_name_cstr, num_sample)
if err < 0:
raise RuntimeError("geopm_agent_num_sample() failed: {}".format(
error.message(err)))
result = []
for sample_idx in range(num_sample[0]):
buff = gffi.new("char[]", _name_max)
err = _dl.geopm_agent_sample_name(agent_name_cstr, sample_idx, _name_max, buff)
if err < 0:
raise RuntimeError("geopm_agent_sample_name() failed: {}".format(
error.message(err)))
result.append(gffi.string(buff).decode())
return result
def names():
"""Get the list of all available agents.
Returns:
list[str]: List of all agent names.
"""
num_agent = gffi.new("int *")
err = _dl.geopm_agent_num_avail(num_agent)
if err < 0:
raise RuntimeError("geopm_agent_num_avail() failed: {}".format(
error.message(err)))
buff = gffi.new("char[]", _name_max)
result = []
for agent_idx in range(num_agent[0]):
err = _dl.geopm_agent_name(agent_idx, _name_max, buff)
if err < 0:
raise RuntimeError("geopm_agent_name() failed: {}".format(
error.message(err)))
result.append(gffi.string(buff).decode())
return result
def enforce_policy():
"""Enforce a static implementation of the agent's policy. The agent
and the policy are chosen based on the GEOPM environment
variables and configuration files.
"""
err = _dl.geopm_agent_enforce_policy()
if err < 0:
raise RuntimeError("geopm_agent_enforce_policy() failed: {}".format(
error.message(err)))
class AgentConf(object):
"""The GEOPM agent configuration parameters.
This class contains all the parameters necessary to run the GEOPM
agent with a workload.
Attributes:
path: The output path for this configuration file.
options: A dict of the options for this agent.
"""
def __init__(self, path, agent='monitor', options=dict()):
self._path = path
if agent not in names():
raise SyntaxError('<geopm> geopmpy.io: AgentConf does not support agent type: ' + agent + '!')
self._agent = agent
self._options = options
def __repr__(self):
return json.dumps(self._options)
def __str__(self):
return self.__repr__()
def get_path(self):
return self._path
def get_agent(self):
return self._agent
    def write(self):
"""Write the current config to a file."""
policy_items = policy_names(self._agent)
name_offsets = { name: offset for offset, name in enumerate(policy_items)}
policy_values = [float('nan')] * len(name_offsets)
# Earlier versions of this function had special handling per agent instead
# of using the agent's policy names. Translate the old-style inputs to
# use the new style for backward compatibility.
old_names = []
if self._agent in ['power_governor', 'power_balancer']:
old_names = ['power_budget']
elif self._agent in ['frequency_map']:
old_names = ['frequency_min', 'frequency_max']
policy_dict = self._options.copy()
for offset, name in enumerate(old_names):
if name in policy_dict:
policy_dict[policy_items[offset]] = policy_dict.pop(name)
for (policy_name, policy_value) in policy_dict.items():
if policy_name not in name_offsets:
raise KeyError('<geopm> geopmpy.io: Policy "{}" does not exist in agent "{}"'.format(policy_name, self._agent))
policy_offset = name_offsets[policy_name]
policy_values[policy_offset] = policy_value
with open(self._path, "w") as outfile:
            outfile.write(policy_json(self._agent, policy_values))
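# --- illustrative sketch, not part of the original module ---
# A minimal usage example for the API above. It assumes the native libgeopmd
# library is loadable and that the built-in 'monitor' agent (which takes no
# policy values) is available; the output path is a placeholder.
def _example_write_monitor_policy(conf_path='policy.json'):
    """List every available agent, then write an empty 'monitor' policy."""
    for agent in names():
        print(agent, policy_names(agent))
    conf = AgentConf(conf_path, agent='monitor', options={})
    conf.write()  # serializes the policy JSON to conf_path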
# --- sample 1,101 ---
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2021-2022 Valory AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Helpers for Pytest tests with asynchronous programming."""
import asyncio
import logging
import time
from asyncio import AbstractEventLoop
from threading import Thread
from typing import Any, Callable, Coroutine, Generator, Optional, Union
def wait_for_condition(
condition_checker: Callable,
timeout: int = 2,
error_msg: str = "Timeout",
period: float = 0.001,
) -> None:
"""Wait for condition occures in selected timeout."""
start_time = time.time()
while not condition_checker():
time.sleep(period)
if time.time() > start_time + timeout:
raise TimeoutError(error_msg)
class AnotherThreadTask:
"""
Schedule a task to run on the loop in another thread.
    Provides better cancel behaviour: on cancel, it waits until the task is completely cancelled.
"""
def __init__(
self,
coro: Union[Coroutine[Any, Any, Any], Generator[Any, None, Any]],
loop: AbstractEventLoop,
) -> None:
"""
Init the task.
:param coro: coroutine to schedule
:param loop: an event loop to schedule on.
"""
self._loop = loop
self._coro = coro
self._task: Optional[asyncio.Task] = None
self._future = asyncio.run_coroutine_threadsafe(self._get_task_result(), loop)
async def _get_task_result(self) -> Any:
"""
        Get the task result; must be run in the target loop.
:return: task result value or raise an exception if task failed
"""
self._task = self._loop.create_task(self._coro)
return await self._task
def result(self, timeout: Optional[float] = None) -> Any:
"""
Wait for coroutine execution result.
:param timeout: optional timeout to wait in seconds.
:return: result
"""
return self._future.result(timeout)
def cancel(self) -> None:
"""Cancel coroutine task execution in a target loop."""
if self._task is None:
self._loop.call_soon_threadsafe(self._future.cancel)
else:
self._loop.call_soon_threadsafe(self._task.cancel)
def done(self) -> bool:
"""Check task is done."""
return self._future.done()
class ThreadedAsyncRunner(Thread):
"""Util to run thread with event loop and execute coroutines inside."""
def __init__(self, loop: Optional[AbstractEventLoop] = None) -> None:
"""
Init threaded runner.
        :param loop: optional event loop. If it is a running loop, the threaded runner will use it.
"""
self._loop = loop or asyncio.new_event_loop()
if self._loop.is_closed():
raise ValueError("Event loop closed.") # pragma: nocover
super().__init__(daemon=True)
def start(self) -> None:
"""Start event loop in dedicated thread."""
if self.is_alive() or self._loop.is_running(): # pragma: nocover
return
super().start()
self.call(asyncio.sleep(0.001)).result(1)
def run(self) -> None:
"""Run code inside thread."""
logging.debug("Starting threaded asyncio loop...")
asyncio.set_event_loop(self._loop)
self._loop.run_forever()
logging.debug("Asyncio loop has been stopped.")
def call(
self, coro: Union[Coroutine[Any, Any, Any], Generator[Any, None, Any]]
) -> Any:
"""
Run a coroutine inside the event loop.
:param coro: a coroutine to run.
:return: task
"""
return AnotherThreadTask(coro, self._loop)
def stop(self) -> None:
"""Stop event loop in thread."""
logging.debug("Stopping...")
if not self.is_alive(): # pragma: nocover
return
if self._loop.is_running():
logging.debug("Stopping loop...")
self._loop.call_soon_threadsafe(self._loop.stop)
logging.debug("Wait thread to join...")
self.join(10)
logging.debug("Stopped.")
class BaseThreadedAsyncLoop:
"""Test class with a threaded event loop running."""
DEFAULT_ASYNC_TIMEOUT = 5.0
loop: ThreadedAsyncRunner
thread: Thread
def setup(self) -> None:
"""Set up the class."""
self.loop = ThreadedAsyncRunner()
self.loop.start()
def execute(
self,
coro: Union[Coroutine[Any, Any, Any], Generator[Any, None, Any]],
timeout: float = DEFAULT_ASYNC_TIMEOUT,
) -> Any:
"""Execute a coroutine and wait its completion."""
task: AnotherThreadTask = self.loop.call(coro)
return task.result(timeout=timeout)
    def teardown(self) -> None:
        """Teardown the class."""
        self.loop.stop()
        self.loop.join(5.0)
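# --- illustrative sketch, not part of the original module ---
# A minimal example of the helpers above: start an event loop in a background
# thread, schedule a coroutine on it, and block on the result.
def _example_run_in_background_loop():
    runner = ThreadedAsyncRunner()
    runner.start()
    task = runner.call(asyncio.sleep(0.01, result=42))
    wait_for_condition(task.done, timeout=2)  # poll until the task finishes
    assert task.result(timeout=1) == 42
    runner.stop()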
# --- sample 1,102 ---
from json import dumps
from urllib.parse import quote_plus
from flask import (
request, render_template, url_for, jsonify
)
from pypuppetdb.QueryBuilder import (AndOperator,
EqualsOperator)
from puppetboard.core import get_app, get_puppetdb, environments
from puppetboard.utils import (check_env, get_or_abort, parse_python)
app = get_app()
puppetdb = get_puppetdb()
@app.route('/fact/<fact>/json',
defaults={'env': app.config['DEFAULT_ENVIRONMENT'],
'node': None, 'value': None})
@app.route('/<env>/fact/<fact>/json', defaults={'node': None, 'value': None})
@app.route('/fact/<fact>/<value>/json',
defaults={'env': app.config['DEFAULT_ENVIRONMENT'], 'node': None})
@app.route('/fact/<fact>/<path:value>/json',
defaults={'env': app.config['DEFAULT_ENVIRONMENT'], 'node': None})
@app.route('/<env>/fact/<fact>/<value>/json', defaults={'node': None})
@app.route('/node/<node>/facts/json',
defaults={'env': app.config['DEFAULT_ENVIRONMENT'],
'fact': None, 'value': None})
@app.route('/<env>/node/<node>/facts/json',
defaults={'fact': None, 'value': None})
def fact_ajax(env, node, fact, value):
"""Fetches the specific facts matching (node/fact/value) from PuppetDB and
return a JSON table
:param env: Searches for facts in this environment
:type env: :obj:`string`
:param node: Find all facts for this node
:type node: :obj:`string`
:param fact: Find all facts with this name
:type fact: :obj:`string`
:param value: Filter facts whose value is equal to this
:type value: :obj:`string`
"""
draw = int(request.args.get('draw', 0))
envs = environments()
check_env(env, envs)
render_graph = False
if fact is not None and fact in app.config['GRAPH_FACTS'] and value is None and node is None:
render_graph = True
query = AndOperator()
if node is not None:
query.add(EqualsOperator("certname", node))
if env != '*':
query.add(EqualsOperator("environment", env))
if value is not None:
# interpret the value as a proper type...
value = parse_python(value)
# ...to know if it should be quoted or not in the query to PuppetDB
# (f.e. a string should, while a number should not)
query.add(EqualsOperator('value', value))
# if we have not added any operations to the query,
# then make it explicitly empty
if len(query.operations) == 0:
query = None
    facts = [f for f in get_or_abort(
        puppetdb.facts,
        name=fact,
        query=query)]
    total = len(facts)
counts = {}
json = {
'draw': draw,
'recordsTotal': total,
'recordsFiltered': total,
'data': []}
    for fact_h in facts:
line = []
if fact is None:
line.append(fact_h.name)
if node is None:
line.append('<a href="{0}">{1}</a>'.format(
url_for('node', env=env, node_name=fact_h.node),
fact_h.node))
if value is None:
if isinstance(fact_h.value, str):
# https://github.com/voxpupuli/puppetboard/issues/706
# Force quotes around string values
# This lets plain int values that are stored as strings in the db
# be findable when searched via the facts page
value_for_url = '"' + quote_plus(fact_h.value) + '"'
else:
value_for_url = fact_h.value
line.append('["{0}", {1}]'.format(
url_for(
'fact', env=env, fact=fact_h.name, value=value_for_url),
dumps(fact_h.value)))
json['data'].append(line)
if render_graph:
if fact_h.value not in counts:
counts[fact_h.value] = 0
counts[fact_h.value] += 1
if render_graph:
json['chart'] = [
{"label": "{0}".format(k).replace('\n', ' '),
"value": counts[k]}
for k in sorted(counts, key=lambda k: counts[k], reverse=True)]
return jsonify(json)
@app.route('/facts', defaults={'env': app.config['DEFAULT_ENVIRONMENT']})
@app.route('/<env>/facts')
def facts(env):
"""Displays an alphabetical list of all facts currently known to
PuppetDB.
:param env: Serves no purpose for this function, only for consistency's
sake
:type env: :obj:`string`
"""
envs = environments()
check_env(env, envs)
    facts = get_or_abort(puppetdb.fact_names)
    # we consider a column label to count for ~5 lines
    column_label_height = 5
    # 1 label per different letter and up to 3 more labels for letters spanning
    # multiple columns.
    column_label_count = 3 + len(set(map(lambda fact: fact[0].upper(), facts)))
    break_size = (len(facts) + column_label_count * column_label_height) / 4.0
next_break = break_size
facts_columns = []
facts_current_column = []
facts_current_letter = []
letter = None
count = 0
    for fact in facts:
count += 1
if count > next_break:
next_break += break_size
if facts_current_letter:
facts_current_column.append(facts_current_letter)
if facts_current_column:
facts_columns.append(facts_current_column)
facts_current_column = []
facts_current_letter = []
letter = None
if letter != fact[0].upper():
if facts_current_letter:
facts_current_column.append(facts_current_letter)
facts_current_letter = []
letter = fact[0].upper()
count += column_label_height
facts_current_letter.append(fact)
if facts_current_letter:
facts_current_column.append(facts_current_letter)
if facts_current_column:
facts_columns.append(facts_current_column)
return render_template('facts.html',
facts_columns=facts_columns,
envs=envs,
current_env=env)
@app.route('/fact/<fact>',
defaults={'env': app.config['DEFAULT_ENVIRONMENT'], 'value': None})
@app.route('/<env>/fact/<fact>', defaults={'value': None})
@app.route('/fact/<fact>/<value>',
defaults={'env': app.config['DEFAULT_ENVIRONMENT']})
@app.route('/<env>/fact/<fact>/<value>')
def fact(env, fact, value):
"""Fetches the specific fact(/value) from PuppetDB and displays per
node for which this fact is known.
:param env: Searches for facts in this environment
:type env: :obj:`string`
:param fact: Find all facts with this name
:type fact: :obj:`string`
:param value: Find all facts with this value
:type value: :obj:`string`
"""
envs = environments()
check_env(env, envs)
render_graph = False
if fact in app.config['GRAPH_FACTS'] and not value:
render_graph = True
value_json = value
if value is not None:
value_object = parse_python(value)
if type(value_object) is str:
value_json = value_object
else:
value_json = dumps(value_object)
natural_time_delta_sort = False
if fact in ["uptime"]:
natural_time_delta_sort = True
return render_template(
'fact.html',
fact=fact,
value=value,
value_json=value_json,
render_graph=render_graph,
envs=envs,
current_env=env,
natural_time_delta_sort=natural_time_delta_sort
    )
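# --- illustrative sketch, not part of the original module ---
# The forced-quoting branch of fact_ajax() can be exercised on its own. A
# string fact value such as "42" must stay distinguishable from the integer
# 42 after it round-trips through a URL, hence the literal double quotes.
def _example_value_for_url(value):
    """Mirror the per-value quoting logic used when building fact links."""
    if isinstance(value, str):
        return '"' + quote_plus(value) + '"'
    return value
# _example_value_for_url("42") -> '"42"'  (string fact value, quoted)
# _example_value_for_url(42)   -> 42      (numeric fact value, unchanged)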
# --- sample 1,103 ---
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkeipanycast.endpoint import endpoint_data
class ListAnycastEipAddressesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Eipanycast', '2020-03-09', 'ListAnycastEipAddresses','eipanycast')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
    def get_InstanceChargeType(self): # String
return self.get_query_params().get('InstanceChargeType')
def set_InstanceChargeType(self, InstanceChargeType): # String
self.add_query_param('InstanceChargeType', InstanceChargeType)
def get_BusinessStatus(self): # String
return self.get_query_params().get('BusinessStatus')
def set_BusinessStatus(self, BusinessStatus): # String
self.add_query_param('BusinessStatus', BusinessStatus)
def get_ServiceLocation(self): # String
return self.get_query_params().get('ServiceLocation')
def set_ServiceLocation(self, ServiceLocation): # String
self.add_query_param('ServiceLocation', ServiceLocation)
def get_AnycastEipAddress(self): # String
return self.get_query_params().get('AnycastEipAddress')
def set_AnycastEipAddress(self, AnycastEipAddress): # String
self.add_query_param('AnycastEipAddress', AnycastEipAddress)
def get_AnycastIdss(self): # RepeatList
return self.get_query_params().get('AnycastIds')
def set_AnycastIdss(self, AnycastIds): # RepeatList
for depth1 in range(len(AnycastIds)):
self.add_query_param('AnycastIds.' + str(depth1 + 1), AnycastIds[depth1])
def get_Tagss(self): # RepeatList
return self.get_query_params().get('Tags')
def set_Tagss(self, Tags): # RepeatList
for depth1 in range(len(Tags)):
if Tags[depth1].get('Key') is not None:
self.add_query_param('Tags.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))
if Tags[depth1].get('Value') is not None:
self.add_query_param('Tags.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
def get_InternetChargeType(self): # String
return self.get_query_params().get('InternetChargeType')
def set_InternetChargeType(self, InternetChargeType): # String
self.add_query_param('InternetChargeType', InternetChargeType)
def get_AnycastId(self): # String
return self.get_query_params().get('AnycastId')
def set_AnycastId(self, AnycastId): # String
self.add_query_param('AnycastId', AnycastId)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_BindInstanceIdss(self): # RepeatList
return self.get_query_params().get('BindInstanceIds')
def set_BindInstanceIdss(self, BindInstanceIds): # RepeatList
for depth1 in range(len(BindInstanceIds)):
self.add_query_param('BindInstanceIds.' + str(depth1 + 1), BindInstanceIds[depth1])
def get_MaxResults(self): # Integer
return self.get_query_params().get('MaxResults')
def set_MaxResults(self, MaxResults): # Integer
self.add_query_param('MaxResults', MaxResults)
def get_Status(self): # String
return self.get_query_params().get('Status')
def set_Status(self, Status): # String
        self.add_query_param('Status', Status)
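# --- illustrative sketch, not part of the original module ---
# A hedged example of driving this request class with the core SDK client.
# AcsClient and do_action_with_exception are standard aliyunsdkcore APIs; the
# region and page size below are placeholder values.
def _example_list_anycast_eips(access_key_id, access_key_secret):
    from aliyunsdkcore.client import AcsClient
    client = AcsClient(access_key_id, access_key_secret, 'cn-hangzhou')
    request = ListAnycastEipAddressesRequest()
    request.set_MaxResults(20)  # first page only
    return client.do_action_with_exception(request)  # raw JSON bytes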
# --- sample 1,104 ---
#! /usr/bin/python
#
# Copyright (c) 2017 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import PlotPowerStates as plotter
import argparse
import os
from subprocess import call
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("--statsfile", required=True, help="stats file path")
parser.add_argument(
"--bankutils",
default="b1 b2 b3",
help="target bank " 'utilization values separated by space, e.g. "1 4 8"',
)
parser.add_argument(
"--seqbytes",
default="s1 s2 s3",
help="no. of "
"sequential bytes requested by each traffic gen request."
' e.g. "64 256 512"',
)
parser.add_argument(
"--delays",
default="d1 d2 d3",
help="string of delay" ' values separated by a space. e.g. "1 20 100"',
)
parser.add_argument(
"--outdir", help="directory to output plots", default="plot_test"
)
parser.add_argument("--pdf", action="store_true", help="output Latex and pdf")
def main():
args = parser.parse_args()
if not os.path.isfile(args.statsfile):
exit(f"Error! File not found: {args.statsfile}")
if not os.path.isdir(args.outdir):
os.mkdir(args.outdir)
bank_util_list = args.bankutils.strip().split()
seqbyte_list = args.seqbytes.strip().split()
delays = args.delays.strip().split()
plotter.plotLowPStates(
args.outdir + "/", args.statsfile, bank_util_list, seqbyte_list, delays
)
if args.pdf:
textwidth = "0.5"
### Time and energy plots ###
#############################
# place tex and pdf files in outdir
os.chdir(args.outdir)
texfile_s = "stacked_lowp_sweep.tex"
print("\t", texfile_s)
outfile = open(texfile_s, "w")
startDocText(outfile)
outfile.write("\\begin{figure} \n\\centering\n")
## Time plots for all delay values
for delay in delays:
# Time
filename = plotter.stateTimePlotName(str(delay) + "-")
outfile.write(wrapForGraphic(filename, textwidth))
outfile.write(getCaption(delay))
outfile.write("\end{figure}\n")
# Energy plots for all delay values
outfile.write("\\begin{figure} \n\\centering\n")
for delay in delays:
# Energy
filename = plotter.stateEnergyPlotName(str(delay) + "-")
outfile.write(wrapForGraphic(filename, textwidth))
outfile.write(getCaption(delay))
outfile.write("\\end{figure}\n")
endDocText(outfile)
outfile.close()
print("\n Generating pdf file")
print("*******************************")
print("\tpdflatex ", texfile_s)
# Run pdflatex to generate to pdf
call(["pdflatex", texfile_s])
call(["open", texfile_s.split(".")[0] + ".pdf"])
def getCaption(delay):
return "\\caption{" + "itt delay = " + str(delay) + "}\n"
def wrapForGraphic(filename, width="1.0"):
# \t is tab and needs to be escaped, therefore \\textwidth
return (
"\\includegraphics[width=" + width + "\\textwidth]{" + filename + "}\n"
)
def startDocText(outfile):
start_stuff = """
\\documentclass[a4paper,landscape,twocolumn]{article}
\\usepackage{graphicx}
\\usepackage[margin=0.5cm]{geometry}
\\begin{document}
"""
outfile.write(start_stuff)
def endDocText(outfile):
end_stuff = """
\\end{document}
"""
outfile.write(end_stuff)
# Call main
if __name__ == "__main__":
    main()
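# --- illustrative sketch, not part of the original script ---
# The LaTeX helpers above are pure string builders, so their output can be
# inspected without running pdflatex; the plot filename is a placeholder.
def _example_latex_fragment():
    fragment = wrapForGraphic("plot-20.eps", "0.5") + getCaption(20)
    # fragment is now:
    #   \includegraphics[width=0.5\textwidth]{plot-20.eps}
    #   \caption{itt delay = 20}
    return fragment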
# --- sample 1,105 ---
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkess.endpoint import endpoint_data
class ModifyScalingRuleRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ess', '2014-08-28', 'ModifyScalingRule','ess')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_AlarmDimensions(self): # RepeatList
return self.get_query_params().get('AlarmDimension')
def set_AlarmDimensions(self, AlarmDimension): # RepeatList
for depth1 in range(len(AlarmDimension)):
if AlarmDimension[depth1].get('DimensionValue') is not None:
self.add_query_param('AlarmDimension.' + str(depth1 + 1) + '.DimensionValue', AlarmDimension[depth1].get('DimensionValue'))
if AlarmDimension[depth1].get('DimensionKey') is not None:
self.add_query_param('AlarmDimension.' + str(depth1 + 1) + '.DimensionKey', AlarmDimension[depth1].get('DimensionKey'))
def get_StepAdjustments(self): # RepeatList
return self.get_query_params().get('StepAdjustment')
def set_StepAdjustments(self, StepAdjustment): # RepeatList
for depth1 in range(len(StepAdjustment)):
if StepAdjustment[depth1].get('MetricIntervalUpperBound') is not None:
self.add_query_param('StepAdjustment.' + str(depth1 + 1) + '.MetricIntervalUpperBound', StepAdjustment[depth1].get('MetricIntervalUpperBound'))
if StepAdjustment[depth1].get('MetricIntervalLowerBound') is not None:
self.add_query_param('StepAdjustment.' + str(depth1 + 1) + '.MetricIntervalLowerBound', StepAdjustment[depth1].get('MetricIntervalLowerBound'))
if StepAdjustment[depth1].get('ScalingAdjustment') is not None:
self.add_query_param('StepAdjustment.' + str(depth1 + 1) + '.ScalingAdjustment', StepAdjustment[depth1].get('ScalingAdjustment'))
def get_DisableScaleIn(self): # Boolean
return self.get_query_params().get('DisableScaleIn')
def set_DisableScaleIn(self, DisableScaleIn): # Boolean
self.add_query_param('DisableScaleIn', DisableScaleIn)
def get_ScalingRuleId(self): # String
return self.get_query_params().get('ScalingRuleId')
def set_ScalingRuleId(self, ScalingRuleId): # String
self.add_query_param('ScalingRuleId', ScalingRuleId)
def get_InitialMaxSize(self): # Integer
return self.get_query_params().get('InitialMaxSize')
def set_InitialMaxSize(self, InitialMaxSize): # Integer
self.add_query_param('InitialMaxSize', InitialMaxSize)
def get_ScalingRuleName(self): # String
return self.get_query_params().get('ScalingRuleName')
def set_ScalingRuleName(self, ScalingRuleName): # String
self.add_query_param('ScalingRuleName', ScalingRuleName)
def get_Cooldown(self): # Integer
return self.get_query_params().get('Cooldown')
def set_Cooldown(self, Cooldown): # Integer
self.add_query_param('Cooldown', Cooldown)
def get_PredictiveValueBehavior(self): # String
return self.get_query_params().get('PredictiveValueBehavior')
def set_PredictiveValueBehavior(self, PredictiveValueBehavior): # String
self.add_query_param('PredictiveValueBehavior', PredictiveValueBehavior)
def get_ScaleInEvaluationCount(self): # Integer
return self.get_query_params().get('ScaleInEvaluationCount')
def set_ScaleInEvaluationCount(self, ScaleInEvaluationCount): # Integer
self.add_query_param('ScaleInEvaluationCount', ScaleInEvaluationCount)
def get_MetricName(self): # String
return self.get_query_params().get('MetricName')
def set_MetricName(self, MetricName): # String
self.add_query_param('MetricName', MetricName)
def get_PredictiveScalingMode(self): # String
return self.get_query_params().get('PredictiveScalingMode')
def set_PredictiveScalingMode(self, PredictiveScalingMode): # String
self.add_query_param('PredictiveScalingMode', PredictiveScalingMode)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_AdjustmentValue(self): # Integer
return self.get_query_params().get('AdjustmentValue')
def set_AdjustmentValue(self, AdjustmentValue): # Integer
self.add_query_param('AdjustmentValue', AdjustmentValue)
def get_EstimatedInstanceWarmup(self): # Integer
return self.get_query_params().get('EstimatedInstanceWarmup')
def set_EstimatedInstanceWarmup(self, EstimatedInstanceWarmup): # Integer
self.add_query_param('EstimatedInstanceWarmup', EstimatedInstanceWarmup)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_PredictiveTaskBufferTime(self): # Integer
return self.get_query_params().get('PredictiveTaskBufferTime')
def set_PredictiveTaskBufferTime(self, PredictiveTaskBufferTime): # Integer
self.add_query_param('PredictiveTaskBufferTime', PredictiveTaskBufferTime)
def get_AdjustmentType(self): # String
return self.get_query_params().get('AdjustmentType')
def set_AdjustmentType(self, AdjustmentType): # String
self.add_query_param('AdjustmentType', AdjustmentType)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_PredictiveValueBuffer(self): # Integer
return self.get_query_params().get('PredictiveValueBuffer')
def set_PredictiveValueBuffer(self, PredictiveValueBuffer): # Integer
self.add_query_param('PredictiveValueBuffer', PredictiveValueBuffer)
def get_ScaleOutEvaluationCount(self): # Integer
return self.get_query_params().get('ScaleOutEvaluationCount')
    def set_ScaleOutEvaluationCount(self, ScaleOutEvaluationCount): # Integer
self.add_query_param('ScaleOutEvaluationCount', ScaleOutEvaluationCount)
def get_MinAdjustmentMagnitude(self): # Integer
return self.get_query_params().get('MinAdjustmentMagnitude')
def set_MinAdjustmentMagnitude(self, MinAdjustmentMagnitude): # Integer
self.add_query_param('MinAdjustmentMagnitude', MinAdjustmentMagnitude)
def get_TargetValue(self): # Float
return self.get_query_params().get('TargetValue')
def set_TargetValue(self, TargetValue): # Float
        self.add_query_param('TargetValue', TargetValue)
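# --- illustrative sketch, not part of the original module ---
# The RepeatList setters above flatten a list of dicts into numbered query
# parameters. This sketch shows the wire-level keys produced for two step
# adjustments (the numeric values are placeholders).
def _example_step_adjustment_params():
    request = ModifyScalingRuleRequest()
    request.set_StepAdjustments([
        {'MetricIntervalLowerBound': 0, 'ScalingAdjustment': 1},
        {'MetricIntervalLowerBound': 10, 'ScalingAdjustment': 3},
    ])
    # get_query_params() now contains, among others:
    #   StepAdjustment.1.MetricIntervalLowerBound -> 0
    #   StepAdjustment.1.ScalingAdjustment        -> 1
    #   StepAdjustment.2.MetricIntervalLowerBound -> 10
    #   StepAdjustment.2.ScalingAdjustment        -> 3
    return request.get_query_params()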
# --- sample 1,106 ---
import logging
from galaxy.exceptions import ConfigurationError
from galaxy.util.yaml_util import ordered_load
log = logging.getLogger(__name__)
OPTION_DEFAULTS = {
"type": "str",
"unknown_option": False,
"default": None,
"desc": None,
}
UNKNOWN_OPTION = {
"type": "str",
"required": False,
"unknown_option": True,
"desc": "Unknown option, may want to remove or report to Galaxy team.",
}
class Schema:
def __init__(self, mapping):
self.app_schema = mapping
def get_app_option(self, name):
try:
raw_option = self.app_schema[name]
except KeyError:
raw_option = UNKNOWN_OPTION
option = OPTION_DEFAULTS.copy()
option.update(raw_option)
return option
class AppSchema(Schema):
def __init__(self, schema_path, app_name):
self.raw_schema = self._read_schema(schema_path)
self.description = self.raw_schema.get("desc", None)
app_schema = self.raw_schema["mapping"][app_name]["mapping"]
self._preprocess(app_schema)
super().__init__(app_schema)
def _read_schema(self, path):
with open(path) as f:
return ordered_load(f)
def _preprocess(self, app_schema):
"""Populate schema collections used for app configuration."""
self._defaults = {} # {config option: default value or null}
self._reloadable_options = set() # config options we can reload at runtime
self._paths_to_resolve = {} # {config option: referenced config option}
self._per_host_options = set() # config options that can be set using a per_host config parameter
self._deprecated_aliases = {}
for key, data in app_schema.items():
self._defaults[key] = data.get("default")
if data.get("deprecated_alias"):
self._deprecated_aliases[data.get("deprecated_alias")] = key
if data.get("reloadable"):
self._reloadable_options.add(key)
if data.get("per_host"):
resolves_to = data.get("resolves_to")
if resolves_to:
self._per_host_options.add(resolves_to)
else:
self._per_host_options.add(key)
if data.get("path_resolves_to"):
self._paths_to_resolve[key] = data.get("path_resolves_to")
@property
def defaults(self):
return self._defaults
@property
def paths_to_resolve(self):
return self._paths_to_resolve
@property
def reloadable_options(self):
return self._reloadable_options
@property
def per_host_options(self):
return self._per_host_options
def validate_path_resolution_graph(self):
"""This method is for tests only: we SHOULD validate the schema's path resolution graph
as part of automated testing; but we should NOT validate it at runtime.
"""
def check_exists(option, key):
if not option:
message = (
"Invalid schema: property '{}' listed as path resolution target "
"for '{}' does not exist".format(resolves_to, key)
)
raise_error(message)
def check_type_is_str_or_any(option, key):
if option.get("type") not in ("str", "any"):
message = f"Invalid schema: property '{key}' should have type 'str'"
raise_error(message)
        def check_for_cycles():
visited = set()
for key in self.paths_to_resolve:
visited.clear()
while key:
visited.add(key)
key = self.app_schema[key].get("path_resolves_to")
if key and key in visited:
raise_error("Invalid schema: cycle detected")
def raise_error(message):
log.error(message)
raise ConfigurationError(message)
for key, resolves_to in self.paths_to_resolve.items():
parent = self.app_schema.get(resolves_to)
check_exists(parent, key)
check_type_is_str_or_any(parent, key)
check_type_is_str_or_any(self.app_schema[key], key)
        check_for_cycles()  # must be called last: walks entire graph
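# --- illustrative sketch, not part of the original module ---
# Schema can be exercised without a YAML file: known keys are padded with
# OPTION_DEFAULTS and unknown keys fall back to UNKNOWN_OPTION. The mapping
# below is made up for the example.
def _example_schema_lookup():
    schema = Schema({'data_dir': {'type': 'str', 'default': 'data'}})
    known = schema.get_app_option('data_dir')
    unknown = schema.get_app_option('no_such_option')
    assert known['default'] == 'data' and known['unknown_option'] is False
    assert unknown['unknown_option'] is True
    return known, unknown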
# --- sample 1,107 ---
import pytest
from eth_utils import (
to_bytes,
)
from ethpm._utils.deployments import (
get_linked_deployments,
normalize_linked_references,
validate_linked_references,
)
from ethpm.contract import (
LinkableContract,
)
from ethpm.deployments import (
Deployments,
)
from ethpm.exceptions import (
BytecodeLinkingError,
EthPMValidationError,
)
from web3.eth import (
Contract,
)
DEPLOYMENT_DATA = {
"SafeMathLib": {
"contractType": "SafeMathLib",
"address": "0x8d2c532d7d211816a2807a411f947b211569b68c",
"transaction": "0xaceef751507a79c2dee6aa0e9d8f759aa24aab081f6dcf6835d792770541cb2b", # noqa: E501
"block": "0x420cb2b2bd634ef42f9082e1ee87a8d4aeeaf506ea5cdeddaa8ff7cbf911810c",
}
}
@pytest.fixture
def contract_factory(safe_math_lib_package):
return safe_math_lib_package.get_contract_type("SafeMathLib")
VALID_CONTRACT_TYPES = {"SafeMathLib": contract_factory}
INVALID_CONTRACT_TYPES = {"INVALID": contract_factory}
@pytest.fixture
def deployment():
return Deployments(DEPLOYMENT_DATA, VALID_CONTRACT_TYPES)
@pytest.fixture
def invalid_deployment():
return Deployments(DEPLOYMENT_DATA, INVALID_CONTRACT_TYPES)
def test_deployment_implements_getitem(deployment):
assert deployment["SafeMathLib"] == DEPLOYMENT_DATA["SafeMathLib"]
@pytest.mark.parametrize("name", ("", "-abc", "A=bc", "X" * 257))
def test_deployment_getitem_with_invalid_contract_name_raises_exception(
name, deployment
):
with pytest.raises(EthPMValidationError):
assert deployment[name]
def test_deployment_getitem_without_deployment_reference_raises_exception(deployment):
with pytest.raises(KeyError):
deployment["DoesNotExist"]
def test_deployment_implements_get_items(deployment):
expected_items = DEPLOYMENT_DATA.items()
assert deployment.items() == expected_items
def test_deployment_implements_get_values(deployment):
expected_values = list(DEPLOYMENT_DATA.values())
assert deployment.values() == expected_values
def test_deployment_implements_key_lookup(deployment):
key = "SafeMathLib" in deployment
assert key is True
def test_deployment_implements_key_lookup_with_nonexistent_key_raises_exception(
deployment,
):
key = "invalid" in deployment
assert key is False
@pytest.mark.parametrize("invalid_name", ("", "-abc", "A=bc", "X" * 257))
def test_get_instance_with_invalid_name_raises_exception(deployment, invalid_name):
with pytest.raises(EthPMValidationError):
deployment.get_instance(invalid_name)
def test_get_instance_without_deployment_reference_raises_exception(deployment):
with pytest.raises(KeyError):
deployment.get_instance("InvalidContract")
def test_deployments_get_instance(safe_math_lib_package):
deps = safe_math_lib_package.deployments
safe_math_instance = deps.get_instance("SafeMathLib")
assert isinstance(safe_math_instance, Contract)
assert safe_math_instance.bytecode == to_bytes(
hexstr=safe_math_lib_package.manifest["contractTypes"]["SafeMathLib"][
"deploymentBytecode"
]["bytecode"]
)
def test_deployments_get_instance_with_contract_alias(safe_math_lib_package_with_alias):
deps = safe_math_lib_package_with_alias.deployments
safe_math_instance = deps.get_instance("safe-math-lib-alias")
assert isinstance(safe_math_instance, Contract)
assert safe_math_instance.bytecode == to_bytes(
hexstr=safe_math_lib_package_with_alias.manifest["contractTypes"][
"SafeMathLib"
]["deploymentBytecode"]["bytecode"]
)
def test_deployments_get_instance_with_link_dependency(escrow_package):
deployments = escrow_package.deployments
escrow_deployment = deployments.get_instance("Escrow")
assert isinstance(escrow_deployment, LinkableContract)
assert not escrow_deployment.needs_bytecode_linking
def test_get_linked_deployments(escrow_package):
escrow_manifest = escrow_package.manifest
all_deployments = list(escrow_manifest["deployments"].values())[0]
actual_linked_deployments = get_linked_deployments(all_deployments)
assert actual_linked_deployments == {"Escrow": all_deployments["Escrow"]}
# integration via package.deployments
deployments = escrow_package.deployments
assert len(deployments.contract_instances) == 2
@pytest.mark.parametrize(
"deployments",
(
(
{
"Escrow": {
"contractType": "Escrow",
"address": "0x8c1968deB27251A3f1F4508df32dA4dfD1b7b57f",
"transaction": "0xc60e32c63abf34579390ef65d83cc5eb52225de38c3eeca2e5afa961d71c16d0", # noqa: E501
"block": "0x4d1a618802bb87752d95db453dddeea622820424a2f836bedf8769a67ee276b8", # noqa: E501
"runtimeBytecode": {
"linkDependencies": [
{"offsets": [200], "type": "reference", "value": "filler"},
{
"offsets": [301, 495],
"type": "reference",
"value": "Escrow",
},
]
},
}
},
)
),
)
def test_get_linked_deployments_raises_exception_with_self_reference(deployments):
with pytest.raises(BytecodeLinkingError):
get_linked_deployments(deployments)
@pytest.mark.parametrize(
"link_data,expected",
(
(
[
{"offsets": [1], "type": "reference", "value": "123"},
{"offsets": [2, 3], "type": "literal", "value": "abc"},
],
((1, "reference", "123"), (2, "literal", "abc"), (3, "literal", "abc")),
),
(
[{"offsets": [1, 2, 3], "type": "literal", "value": "123"}],
((1, "literal", "123"), (2, "literal", "123"), (3, "literal", "123")),
),
),
)
def test_normalize_linked_references(link_data, expected):
link_deps = normalize_linked_references(link_data)
assert link_deps == expected
@pytest.mark.parametrize(
"link_deps,bytecode",
(
(((1, b"abc"),), b"xabc"),
(((1, b"a"), (5, b"xx"), (15, b"1")), b"0a000xx000000001"),
),
)
def test_validate_linked_references(link_deps, bytecode):
result = validate_linked_references(link_deps, bytecode)
assert result is None
@pytest.mark.parametrize(
"link_deps,bytecode",
(
(((0, b"abc"),), b"xabc"),
(((2, b"abc"),), b"xabc"),
(((8, b"abc"),), b"xabc"),
(((1, b"a"), (5, b"xxx"), (15, b"1")), b"0a000xx000000001"),
),
)
def test_validate_linked_references_invalidates(link_deps, bytecode):
with pytest.raises(EthPMValidationError):
        validate_linked_references(link_deps, bytecode)
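# --- illustrative sketch, not part of the original test module ---
# A rough reimplementation of the invariant the last two test cases imply:
# every linked value must appear in the bytecode at its stated offset. This
# is inferred from the parametrize data above, not taken from the library.
def _example_offsets_match(link_deps, bytecode):
    return all(
        bytecode[offset:offset + len(value)] == value
        for offset, value in link_deps
    )
# _example_offsets_match(((1, b"abc"),), b"xabc") -> True   (valid case)
# _example_offsets_match(((2, b"abc"),), b"xabc") -> False  (invalid case)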
# --- sample 1,108 ---
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class ModifyRouterInterfaceAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'ModifyRouterInterfaceAttribute','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_OppositeRouterId(self): # String
return self.get_query_params().get('OppositeRouterId')
def set_OppositeRouterId(self, OppositeRouterId): # String
self.add_query_param('OppositeRouterId', OppositeRouterId)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_HealthCheckTargetIp(self): # String
return self.get_query_params().get('HealthCheckTargetIp')
def set_HealthCheckTargetIp(self, HealthCheckTargetIp): # String
self.add_query_param('HealthCheckTargetIp', HealthCheckTargetIp)
def get_OppositeInterfaceId(self): # String
return self.get_query_params().get('OppositeInterfaceId')
def set_OppositeInterfaceId(self, OppositeInterfaceId): # String
self.add_query_param('OppositeInterfaceId', OppositeInterfaceId)
def get_HcThreshold(self): # Integer
return self.get_query_params().get('HcThreshold')
def set_HcThreshold(self, HcThreshold): # Integer
self.add_query_param('HcThreshold', HcThreshold)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_DeleteHealthCheckIp(self): # Boolean
return self.get_query_params().get('DeleteHealthCheckIp')
def set_DeleteHealthCheckIp(self, DeleteHealthCheckIp): # Boolean
self.add_query_param('DeleteHealthCheckIp', DeleteHealthCheckIp)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_RouterInterfaceId(self): # String
return self.get_query_params().get('RouterInterfaceId')
def set_RouterInterfaceId(self, RouterInterfaceId): # String
self.add_query_param('RouterInterfaceId', RouterInterfaceId)
def get_OppositeInterfaceOwnerId(self): # Long
return self.get_query_params().get('OppositeInterfaceOwnerId')
def set_OppositeInterfaceOwnerId(self, OppositeInterfaceOwnerId): # Long
self.add_query_param('OppositeInterfaceOwnerId', OppositeInterfaceOwnerId)
def get_HealthCheckSourceIp(self): # String
return self.get_query_params().get('HealthCheckSourceIp')
def set_HealthCheckSourceIp(self, HealthCheckSourceIp): # String
self.add_query_param('HealthCheckSourceIp', HealthCheckSourceIp)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_OppositeRouterType(self): # String
return self.get_query_params().get('OppositeRouterType')
def set_OppositeRouterType(self, OppositeRouterType): # String
self.add_query_param('OppositeRouterType', OppositeRouterType)
    def get_HcRate(self): # Integer
return self.get_query_params().get('HcRate')
def set_HcRate(self, HcRate): # Integer
        self.add_query_param('HcRate', HcRate)
# --- sample 1,109 ---
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# SPDX-FileCopyrightText: 2023 Nicolas Hennion <[email protected]>
#
# SPDX-License-Identifier: LGPL-3.0-only
#
"""
I am your father...
...for all Glances Application Monitoring Processes (AMP).
AMP (Application Monitoring Process)
A Glances AMP is a Python script called (every *refresh* seconds) if:
- the AMP is *enabled* in the Glances configuration file
- a process is running (match the *regex* define in the configuration file)
The script should define a Amp (GlancesAmp) class with, at least, an update method.
The update method should call the set_result method to set the AMP return string.
The return string is a string with one or more line (\n between lines).
If the *one_line* var is true then the AMP will be displayed in one line.
"""
from glances.globals import u
from glances.timer import Timer
from glances.logger import logger
class GlancesAmp(object):
"""Main class for Glances AMP."""
NAME = '?'
VERSION = '?'
DESCRIPTION = '?'
AUTHOR = '?'
EMAIL = '?'
def __init__(self, name=None, args=None):
"""Init AMP class."""
logger.debug("AMP - Init {} version {}".format(self.NAME, self.VERSION))
# AMP name (= module name without glances_)
if name is None:
self.amp_name = self.__class__.__module__[len('glances_') :]
else:
self.amp_name = name
# Init the args
self.args = args
# Init the configs
self.configs = {}
# A timer is needed to only update every refresh seconds
# Init to 0 in order to update the AMP on startup
self.timer = Timer(0)
def load_config(self, config):
"""Load AMP parameters from the configuration file."""
# Read AMP configuration.
# For ex, the AMP foo should have the following section:
#
# [foo]
# enable=true
# regex=\/usr\/bin\/nginx
# refresh=60
#
# and optionally:
#
# one_line=false
# option1=opt1
amp_section = 'amp_' + self.amp_name
if hasattr(config, 'has_section') and config.has_section(amp_section):
logger.debug("AMP - {}: Load configuration".format(self.NAME))
for param, _ in config.items(amp_section):
try:
self.configs[param] = config.get_float_value(amp_section, param)
except ValueError:
self.configs[param] = config.get_value(amp_section, param).split(',')
if len(self.configs[param]) == 1:
self.configs[param] = self.configs[param][0]
logger.debug("AMP - {}: Load parameter: {} = {}".format(self.NAME, param, self.configs[param]))
else:
logger.debug("AMP - {}: Can not find section {} in the configuration file".format(self.NAME, self.amp_name))
return False
if self.enable():
# Refresh option is mandatory
for k in ['refresh']:
if k not in self.configs:
logger.warning(
"AMP - {}: Can not find configuration key {} in section {} (the AMP will be disabled)".format(
self.NAME, k, self.amp_name
)
)
self.configs['enable'] = 'false'
else:
logger.debug("AMP - {} is disabled".format(self.NAME))
# Init the count to 0
self.configs['count'] = 0
return self.enable()
def get(self, key):
"""Generic method to get the item in the AMP configuration"""
if key in self.configs:
return self.configs[key]
else:
return None
def enable(self):
"""Return True|False if the AMP is enabled in the configuration file (enable=true|false)."""
ret = self.get('enable')
if ret is None:
return False
else:
return ret.lower().startswith('true')
def regex(self):
"""Return regular expression used to identified the current application."""
return self.get('regex')
def refresh(self):
"""Return refresh time in seconds for the current application monitoring process."""
return self.get('refresh')
def one_line(self):
"""Return True|False if the AMP should be displayed in one line (one_line=true|false)."""
ret = self.get('one_line')
if ret is None:
return False
else:
return ret.lower().startswith('true')
def time_until_refresh(self):
"""Return time in seconds until refresh."""
return self.timer.get()
def should_update(self):
"""Return True is the AMP should be updated
Conditions for update:
- AMP is enable
- only update every 'refresh' seconds
"""
if self.timer.finished():
self.timer.set(self.refresh())
self.timer.reset()
return self.enable()
return False
def set_count(self, count):
"""Set the number of processes matching the regex"""
self.configs['count'] = count
def count(self):
"""Get the number of processes matching the regex"""
return self.get('count')
def count_min(self):
"""Get the minimum number of processes"""
return self.get('countmin')
def count_max(self):
"""Get the maximum number of processes"""
return self.get('countmax')
    def set_result(self, result, separator=''):
        """Store the result (string) into the result key of the AMP.
        If one_line is true then it replaces `\n` with the separator.
        """
        if self.one_line():
            self.configs['result'] = u(result).replace('\n', separator)
        else:
            self.configs['result'] = u(result)
    def result(self):
"""Return the result of the AMP (as a string)"""
ret = self.get('result')
if ret is not None:
ret = u(ret)
return ret
def update_wrapper(self, process_list):
"""Wrapper for the children update"""
# Set the number of running process
self.set_count(len(process_list))
# Call the children update method
if self.should_update():
return self.update(process_list)
else:
            return self.result()
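# --- illustrative sketch, not part of the original module ---
# The module docstring says an AMP only needs an update() method; this is a
# minimal hypothetical subclass that reports the matched process count.
class ExampleAmp(GlancesAmp):
    """Toy AMP: report how many processes matched the configured regex."""
    NAME = 'Example'
    VERSION = '0.1'
    DESCRIPTION = 'Count matching processes'
    def update(self, process_list):
        # update_wrapper() has already stored len(process_list) via set_count()
        self.set_result('{} process(es) running'.format(self.count()))
        return self.result()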
# --- sample 1,110 ---
#!/usr/bin/env python3
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
from modules.common_msgs.audio_msgs import audio_event_pb2
from modules.common_msgs.localization_msgs import localization_pb2
from modules.common_msgs.perception_msgs import perception_obstacle_pb2
from modules.common_msgs.perception_msgs import traffic_light_detection_pb2
from modules.common_msgs.planning_msgs import planning_internal_pb2
from modules.common_msgs.planning_msgs import planning_pb2
from modules.common_msgs.prediction_msgs import prediction_obstacle_pb2
from modules.common_msgs.routing_msgs import routing_pb2
from modules.common_msgs.control_msgs import control_cmd_pb2
from modules.common_msgs.chassis_msgs import chassis_pb2
from modules.common_msgs.basic_msgs import drive_event_pb2
from modules.common_msgs.planning_msgs import navigation_pb2
from modules.common_msgs.guardian_msgs import guardian_pb2
from google.protobuf import text_format
from modules.tools.common import proto_utils
class MessageType:
def __init__(self, name, topic, msg_type):
self.name = name
self.topic = topic
self.msg_type = msg_type
def instance(self):
        return self.msg_type()
def parse_file(self, filename):
value = self.instance()
if not proto_utils.get_pb_from_file(filename, value):
print("Failed to parse file %s" % filename)
return None
else:
return value
topic_pb_list = [
MessageType("audio_event", "/apollo/audio_event",
audio_event_pb2.AudioEvent),
MessageType("planning", "/apollo/planning", planning_pb2.ADCTrajectory),
MessageType("control", "/apollo/control", control_cmd_pb2.ControlCommand),
MessageType("chassis", "/apollo/canbus/chassis", chassis_pb2.Chassis),
MessageType("prediction", "/apollo/prediction",
prediction_obstacle_pb2.PredictionObstacles),
MessageType("perception", "/apollo/perception/obstacles",
perception_obstacle_pb2.PerceptionObstacles),
MessageType("routing_response", "/apollo/routing_response",
routing_pb2.RoutingResponse),
MessageType("routing_request", "/apollo/routing_request",
routing_pb2.RoutingRequest),
MessageType("localization", "/apollo/localization/pose",
localization_pb2.LocalizationEstimate),
MessageType("traffic_light", "/apollo/perception/traffic_light",
traffic_light_detection_pb2.TrafficLightDetection),
MessageType("drive_event", "/apollo/drive_event",
drive_event_pb2.DriveEvent),
MessageType("relative_map", "/apollo/relative_map", navigation_pb2.MapMsg),
MessageType("navigation", "/apollo/navigation",
navigation_pb2.NavigationInfo),
MessageType("guardian", "/apollo/guardian", guardian_pb2.GuardianCommand),
]
class PbMessageManager:
def __init__(self):
self.__topic_dict = {}
self.__name_dict = {}
for msg in topic_pb_list:
self.__topic_dict[msg.topic] = msg
self.__name_dict[msg.name] = msg
def topic_dict(self):
return self.__topic_dict
def get_msg_meta_by_topic(self, topic):
if topic in self.__topic_dict:
return self.__topic_dict[topic]
else:
return None
    def get_msg_meta_by_name(self, name):
if name in self.__name_dict:
return self.__name_dict[name]
else:
return None
def name_dict(self):
return self.__name_dict
def parse_topic_file(self, topic, filename):
if topic not in self.__topic_dict:
print("topic %s is not registered in topic_pb_list" % topic)
return None
meta_msg = self.__topic_dict[topic]
return meta_msg.parse_file(filename)
def parse_file(self, filename):
"""parse a file by guessing topic type"""
for topic, meta_msg in self.__topic_dict.items():
try:
message = meta_msg.parse_file(filename)
if message:
print("identified topic %s" % topic)
return (meta_msg, message)
except text_format.ParseError as e:
print("Tried %s, failed" % (topic))
continue
        return (None, None)
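# --- illustrative sketch, not part of the original module ---
# Minimal usage of the manager above: look up a registered topic and build an
# empty protobuf message of the matching type.
def _example_new_planning_message():
    manager = PbMessageManager()
    meta = manager.get_msg_meta_by_topic('/apollo/planning')
    return meta.instance()  # empty planning_pb2.ADCTrajectory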
1,111 | """
This module defines options which should be available on all commands, such as the
-v, --verbose option.
To use it:
- Add the `@add_common_options()` decorator after all the `click.option()` calls of the
command function.
- Add a `**kwargs: Any` argument to the command function.
The `kwargs` argument is required because, due to the way click works,
`add_common_options()` adds an argument for each option it defines.
"""
from pathlib import Path
from typing import Any, Callable, Optional, TypeVar, cast
import click
from ggshield.cmd.utils.debug_logs import setup_debug_logs
from ggshield.core.config.user_config import UserConfig
AnyFunction = Callable[..., Any]
# The argument of a Click option callback function
ArgT = TypeVar("ArgT")
# A Click option callback function
ClickCallback = Callable[
[click.Context, click.Parameter, Optional[ArgT]], Optional[ArgT]
]
def get_config_from_context(ctx: click.Context) -> UserConfig:
"""Returns the UserConfig object stored in Click context"""
return cast(UserConfig, ctx.obj["config"].user_config)
def create_ctx_callback(name: str) -> ClickCallback:
"""Helper function to define a Click option callback for simple cases where we only
have to set a value on Click context object if the option is defined.
"""
def callback(
ctx: click.Context, param: click.Parameter, value: Optional[ArgT]
) -> Optional[ArgT]:
if value is not None:
ctx.obj[name] = value
return value
return callback
def create_config_callback(*option_names: str) -> ClickCallback:
"""Helper function to define a Click option callback for simple cases where we only
have to set a configuration attribute if the option is defined.
to reach UserConfig.foo, set option_names to ["foo"]
    to reach UserConfig.secret.bar, set option_names to ["secret", "bar"]
"""
def callback(
ctx: click.Context, param: click.Parameter, value: Optional[ArgT]
) -> Optional[ArgT]:
if value is not None:
obj = get_config_from_context(ctx)
for name in option_names[:-1]:
obj = getattr(obj, name)
setattr(obj, option_names[-1], value)
return value
return callback
_verbose_option = click.option(
"-v",
"--verbose",
is_flag=True,
default=None,
help="Verbose display mode.",
callback=create_config_callback("verbose"),
)
def debug_callback(
ctx: click.Context, param: click.Parameter, value: Optional[bool]
) -> Optional[bool]:
if value is not None:
setup_debug_logs(filename=None)
return value
# The --debug option is marked as "is_eager" so that we can setup logs as soon as
# possible. If we don't then log commands for the creation of the Config instance
# are ignored.
_debug_option = click.option(
"--debug",
is_flag=True,
default=None,
is_eager=True,
help="Send log output to stderr. Equivalent to `--log-file -`.",
callback=debug_callback,
)
def log_file_callback(
ctx: click.Context, param: click.Parameter, value: Optional[str]
) -> Optional[str]:
if value is not None:
setup_debug_logs(filename=None if value == "-" else value)
return value
# The --log-file option is marked as "is_eager" so that we can setup logs as soon as
# possible. If we don't then log commands for the creation of the Config instance
# are ignored.
_log_file_option = click.option(
"--log-file",
metavar="FILE",
is_eager=True,
help="Send log output to FILE. Use '-' to redirect to stderr.",
envvar="GITGUARDIAN_LOG_FILE",
callback=log_file_callback,
)
_allow_self_signed_option = click.option(
"--allow-self-signed",
is_flag=True,
default=None,
help="Ignore ssl verification.",
callback=create_config_callback("allow_self_signed"),
)
_check_for_updates = click.option(
"--check-for-updates/--no-check-for-updates",
is_flag=True,
default=None,
help="After executing commands, check if a new version of ggshield is available.",
callback=create_ctx_callback("check_for_updates"),
)
exit_zero_option = click.option(
"--exit-zero",
is_flag=True,
default=None,
envvar="GITGUARDIAN_EXIT_ZERO",
help="Always return a 0 (non-error) status code, even if incidents are found."
"The env var GITGUARDIAN_EXIT_ZERO can also be used to set this option.",
callback=create_config_callback("exit_zero"),
)
minimum_severity_option = click.option(
"--minimum-severity",
"minimum_severity",
type=click.Choice(("LOW", "MEDIUM", "HIGH", "CRITICAL")),
help="Minimum severity of the policies.",
)
ignore_path_option = click.option(
"--ignore-path",
"--ipa",
"ignore_paths",
default=None,
multiple=True,
help="Do not scan paths that match the specified glob-like patterns.",
)
def add_common_options() -> Callable[[AnyFunction], AnyFunction]:
    def decorator(cmd: AnyFunction) -> AnyFunction:
_verbose_option(cmd)
_debug_option(cmd)
_log_file_option(cmd)
_allow_self_signed_option(cmd)
_check_for_updates(cmd)
return cmd
    return decorator
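# A minimal usage sketch (hypothetical command; assumes the surrounding CLI
# stores a Config object in ctx.obj["config"], as the callbacks above expect):
#
#     @click.command()
#     @click.option("--name")
#     @add_common_options()
#     def my_cmd(name: str, **kwargs: Any) -> None:
#         ...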
json_option = click.option(
"--json",
"json_output",
is_flag=True,
default=None,
help="Use JSON output.",
callback=create_ctx_callback("use_json"),
)
def use_json(ctx: click.Context) -> bool:
"""Tells whether --json has been set"""
return bool(ctx.obj.get("use_json", False))
directory_argument = click.argument(
"directory",
type=click.Path(exists=True, readable=True, path_type=Path, file_okay=False),
required=False,
# using a default value here makes the deprecated `iac scan` fail
)
all_option = click.option(
"--all",
"scan_all",
is_flag=True,
default=False,
help="Reports all vulnerabilities in the final state.",
)
reference_option = click.option(
"--ref",
required=True,
type=click.STRING,
help="A git reference.",
)
staged_option = click.option(
"--staged",
is_flag=True,
help="Whether staged changes should be included into the scan.",
) | null |
1,112 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class UpdateCdrsMonitorRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'CDRS', '2020-11-01', 'UpdateCdrsMonitor')
self.set_method('POST')
def get_CorpId(self):
return self.get_body_params().get('CorpId')
    def set_CorpId(self,CorpId):
self.add_body_params('CorpId', CorpId)
def get_Description(self):
return self.get_body_params().get('Description')
def set_Description(self,Description):
self.add_body_params('Description', Description)
def get_RuleName(self):
return self.get_body_params().get('RuleName')
def set_RuleName(self,RuleName):
self.add_body_params('RuleName', RuleName)
def get_PicOperateType(self):
return self.get_body_params().get('PicOperateType')
def set_PicOperateType(self,PicOperateType):
self.add_body_params('PicOperateType', PicOperateType)
def get_AttributeName(self):
return self.get_body_params().get('AttributeName')
def set_AttributeName(self,AttributeName):
self.add_body_params('AttributeName', AttributeName)
def get_AttributeOperateType(self):
return self.get_body_params().get('AttributeOperateType')
def set_AttributeOperateType(self,AttributeOperateType):
self.add_body_params('AttributeOperateType', AttributeOperateType)
def get_RuleExpression(self):
return self.get_body_params().get('RuleExpression')
def set_RuleExpression(self,RuleExpression):
self.add_body_params('RuleExpression', RuleExpression)
def get_NotifierTimeOut(self):
return self.get_body_params().get('NotifierTimeOut')
def set_NotifierTimeOut(self,NotifierTimeOut):
self.add_body_params('NotifierTimeOut', NotifierTimeOut)
def get_TaskId(self):
return self.get_body_params().get('TaskId')
def set_TaskId(self,TaskId):
self.add_body_params('TaskId', TaskId)
def get_DeviceOperateType(self):
return self.get_body_params().get('DeviceOperateType')
def set_DeviceOperateType(self,DeviceOperateType):
self.add_body_params('DeviceOperateType', DeviceOperateType)
def get_PicList(self):
return self.get_body_params().get('PicList')
def set_PicList(self,PicList):
self.add_body_params('PicList', PicList)
def get_AttributeValueList(self):
return self.get_body_params().get('AttributeValueList')
def set_AttributeValueList(self,AttributeValueList):
self.add_body_params('AttributeValueList', AttributeValueList)
def get_NotifierAppSecret(self):
return self.get_body_params().get('NotifierAppSecret')
def set_NotifierAppSecret(self,NotifierAppSecret):
self.add_body_params('NotifierAppSecret', NotifierAppSecret)
def get_NotifierExtendValues(self):
return self.get_body_params().get('NotifierExtendValues')
def set_NotifierExtendValues(self,NotifierExtendValues):
self.add_body_params('NotifierExtendValues', NotifierExtendValues)
def get_DeviceList(self):
return self.get_body_params().get('DeviceList')
def set_DeviceList(self,DeviceList):
self.add_body_params('DeviceList', DeviceList)
def get_NotifierUrl(self):
return self.get_body_params().get('NotifierUrl')
def set_NotifierUrl(self,NotifierUrl):
self.add_body_params('NotifierUrl', NotifierUrl)
def get_NotifierType(self):
return self.get_body_params().get('NotifierType')
def set_NotifierType(self,NotifierType):
self.add_body_params('NotifierType', NotifierType)
def get_AlgorithmVendor(self):
return self.get_body_params().get('AlgorithmVendor')
def set_AlgorithmVendor(self,AlgorithmVendor):
        self.add_body_params('AlgorithmVendor', AlgorithmVendor)
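# A minimal usage sketch (hypothetical credentials, region, and parameter
# values; assumes the standard aliyunsdkcore AcsClient API):
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient
    client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
    request = UpdateCdrsMonitorRequest()
    request.set_CorpId('<corp-id>')    # hypothetical value
    request.set_TaskId('<task-id>')    # hypothetical value
    request.set_RuleName('demo-rule')  # hypothetical value
    response = client.do_action_with_exception(request)
    print(response)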
1,113 | """
Universe configuration builder.
"""
import configparser
import logging
import logging.config
import os
from datetime import timedelta
from galaxy.config import (
BaseAppConfiguration,
CommonConfigurationMixin,
expand_pretty_datetime_format,
get_database_engine_options,
TOOL_SHED_CONFIG_SCHEMA_PATH,
)
from galaxy.config.schema import AppSchema
from galaxy.exceptions import ConfigurationError
from galaxy.util import string_as_bool
from galaxy.version import (
VERSION,
VERSION_MAJOR,
VERSION_MINOR,
)
log = logging.getLogger(__name__)
TOOLSHED_APP_NAME = "tool_shed"
class ToolShedAppConfiguration(BaseAppConfiguration, CommonConfigurationMixin):
default_config_file_name = "tool_shed.yml"
add_sample_file_to_defaults = {"datatypes_config_file"}
def _load_schema(self):
return AppSchema(TOOL_SHED_CONFIG_SCHEMA_PATH, TOOLSHED_APP_NAME)
def __init__(self, **kwargs):
super().__init__(**kwargs)
        self._process_config(kwargs)
@property
def shed_tool_data_path(self):
return self.tool_data_path
def check(self):
# Check that required directories exist; attempt to create otherwise
paths_to_check = [
self.file_path,
self.hgweb_config_dir,
self.tool_data_path,
self.template_cache_path,
os.path.join(self.tool_data_path, "shared", "jars"),
]
for path in paths_to_check:
self._ensure_directory(path)
# Check that required files exist.
if not os.path.isfile(self.datatypes_config):
raise ConfigurationError(f"File not found: {self.datatypes_config}")
    def _process_config(self, kwargs):
# Backwards compatibility for names used in too many places to fix
self.datatypes_config = self.datatypes_config_file
# Collect the umask and primary gid from the environment
self.umask = os.umask(0o77) # get the current umask
os.umask(self.umask) # can't get w/o set, so set it back
self.gid = os.getgid() # if running under newgrp(1) we'll need to fix the group of data created on the cluster
self.version_major = VERSION_MAJOR
self.version_minor = VERSION_MINOR
self.version = VERSION
# Database related configuration
if not self.database_connection: # Provide default if not supplied by user
self.database_connection = f"sqlite:///{self._in_data_dir('community.sqlite')}?isolation_level=IMMEDIATE"
self.database_engine_options = get_database_engine_options(kwargs)
self.database_create_tables = string_as_bool(kwargs.get("database_create_tables", "True"))
# Where dataset files are stored
self.file_path = self._in_root_dir(self.file_path)
self.new_file_path = self._in_root_dir(self.new_file_path)
self.cookie_path = kwargs.get("cookie_path")
self.cookie_domain = kwargs.get("cookie_domain")
self.enable_quotas = string_as_bool(kwargs.get("enable_quotas", False))
# Tool stuff
self.tool_path = self._in_root_dir(kwargs.get("tool_path", "tools"))
self.tool_secret = kwargs.get("tool_secret", "")
self.tool_data_path = os.path.join(os.getcwd(), kwargs.get("tool_data_path", "shed-tool-data"))
self.tool_data_table_config_path = None
self.integrated_tool_panel_config = self._in_root_dir(
kwargs.get("integrated_tool_panel_config", "integrated_tool_panel.xml")
)
self.builds_file_path = self._in_root_dir(
kwargs.get("builds_file_path", os.path.join(self.tool_data_path, "shared", "ucsc", "builds.txt"))
)
self.len_file_path = self._in_root_dir(
kwargs.get("len_file_path", os.path.join(self.tool_data_path, "shared", "ucsc", "chrom"))
)
self.ftp_upload_dir = kwargs.get("ftp_upload_dir")
self.update_integrated_tool_panel = False
# Galaxy flavor Docker Image
self.user_activation_on = None
self.registration_warning_message = kwargs.get("registration_warning_message")
self.email_domain_blocklist_content = None
self.email_domain_allowlist_content = None
self.template_cache_path = self._in_root_dir(
kwargs.get("template_cache_path", "database/compiled_templates/community")
)
self.error_email_to = kwargs.get("error_email_to")
self.pretty_datetime_format = expand_pretty_datetime_format(self.pretty_datetime_format)
# Configuration for the message box directly below the masthead.
self.wiki_url = kwargs.get("wiki_url", "https://galaxyproject.org/")
self.blog_url = kwargs.get("blog_url")
self.screencasts_url = kwargs.get("screencasts_url")
self.log_events = False
self.cloud_controller_instance = False
self.server_name = ""
# Where the tool shed hgweb.config file is stored - the default is the Galaxy installation directory.
self.hgweb_config_dir = self._in_root_dir(self.hgweb_config_dir) or self.root
# Proxy features
self.drmaa_external_runjob_script = kwargs.get("drmaa_external_runjob_script")
# Parse global_conf and save the parser
global_conf = kwargs.get("global_conf")
global_conf_parser = configparser.ConfigParser()
self.global_conf_parser = global_conf_parser
if global_conf and "__file__" in global_conf and ".yml" not in global_conf["__file__"]:
global_conf_parser.read(global_conf["__file__"])
self.running_functional_tests = string_as_bool(kwargs.get("running_functional_tests", False))
self.citation_cache_data_dir = self._in_root_dir(
kwargs.get("citation_cache_data_dir", "database/tool_shed_citations/data")
)
self.citation_cache_lock_dir = self._in_root_dir(
kwargs.get("citation_cache_lock_dir", "database/tool_shed_citations/locks")
)
self.citation_cache_url = kwargs.get("citation_cache_lock_dir", None)
self.citation_cache_schema_name = kwargs.get("citation_cache_schema_name", None)
self.citation_cache_table_name = kwargs.get("citation_cache_table_name", None)
self.password_expiration_period = timedelta(days=int(self.password_expiration_period))
# Security/Policy Compliance
self.redact_username_during_deletion = False
self.redact_email_during_deletion = False
self.redact_username_in_logs = False
self.enable_beta_gdpr = string_as_bool(kwargs.get("enable_beta_gdpr", False))
if self.enable_beta_gdpr:
self.redact_username_during_deletion = True
self.redact_email_during_deletion = True
self.redact_username_in_logs = True
self.allow_user_deletion = True
Configuration = ToolShedAppConfiguration | null |
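# A minimal usage sketch (hypothetical overrides; assumes the schema defaults
# supply every option not passed explicitly):
#
#     config = Configuration(tool_data_path="shed-tool-data")
#     config.check()  # creates required directories, verifies datatypes_config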
1,114 | from http import HTTPStatus
from typing import Any, Dict, Optional, Union, cast
import httpx
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.error_response import ErrorResponse
from ...types import Response
def _get_kwargs(
pipeline_id: str,
) -> Dict[str, Any]:
return {
"method": "get",
"url": "/pipelines/{pipeline_id}/validate".format(
pipeline_id=pipeline_id,
),
}
def _parse_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[Union[ErrorResponse, str]]:
if response.status_code == HTTPStatus.OK:
response_200 = cast(str, response.json())
return response_200
if response.status_code == HTTPStatus.BAD_REQUEST:
response_400 = ErrorResponse.from_dict(response.json())
return response_400
if response.status_code == HTTPStatus.NOT_FOUND:
response_404 = ErrorResponse.from_dict(response.json())
return response_404
if client.raise_on_unexpected_status:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None
def _build_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[Union[ErrorResponse, str]]:
return Response(
status_code=HTTPStatus(response.status_code),
content=response.content,
headers=response.headers,
parsed=_parse_response(client=client, response=response),
)
def sync_detailed(
pipeline_id: str,
*,
client: Union[AuthenticatedClient, Client],
) -> Response[Union[ErrorResponse, str]]:
"""Validate a pipeline.
Validate a pipeline.
Checks whether a pipeline is configured correctly. This includes
checking whether the pipeline references a valid compiled program,
whether the connectors reference valid tables/views in the program,
and more.
Args:
pipeline_id (str):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[Union[ErrorResponse, str]]
"""
kwargs = _get_kwargs(
pipeline_id=pipeline_id,
)
response = client.get_httpx_client().request(
**kwargs,
)
return _build_response(client=client, response=response)
def sync(
pipeline_id: str,
*,
client: Union[AuthenticatedClient, Client],
) -> Optional[Union[ErrorResponse, str]]:
"""Validate a pipeline.
Validate a pipeline.
Checks whether a pipeline is configured correctly. This includes
checking whether the pipeline references a valid compiled program,
whether the connectors reference valid tables/views in the program,
and more.
Args:
pipeline_id (str):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Union[ErrorResponse, str]
"""
return sync_detailed(
pipeline_id=pipeline_id,
client=client,
).parsed
async def asyncio_detailed(
pipeline_id: str,
*,
client: Union[AuthenticatedClient, Client],
) -> Response[Union[ErrorResponse, str]]:
"""Validate a pipeline.
Validate a pipeline.
Checks whether a pipeline is configured correctly. This includes
checking whether the pipeline references a valid compiled program,
whether the connectors reference valid tables/views in the program,
and more.
Args:
pipeline_id (str):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[Union[ErrorResponse, str]]
"""
kwargs = _get_kwargs(
pipeline_id=pipeline_id,
)
response = await client.get_async_httpx_client().request(**kwargs)
return _build_response(client=client, response=response)
async def asyncio(
pipeline_id: str,
*,
client: Union[AuthenticatedClient, Client],
) -> Optional[Union[ErrorResponse, str]]:
"""Validate a pipeline.
Validate a pipeline.
Checks whether a pipeline is configured correctly. This includes
checking whether the pipeline references a valid compiled program,
whether the connectors reference valid tables/views in the program,
and more.
Args:
pipeline_id (str):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Union[ErrorResponse, str]
"""
return (
await asyncio_detailed(
pipeline_id=pipeline_id,
client=client,
)
).parsed | null |
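# A minimal usage sketch (hypothetical base URL and pipeline id; assumes the
# generated Client takes a base_url argument, as openapi-python-client
# clients typically do):
#
#     client = Client(base_url="http://localhost:8080/v0")
#     result = sync(pipeline_id="<pipeline-id>", client=client)
#     if isinstance(result, ErrorResponse):
#         ...  # handle the documented 400/404 cases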
1,115 | #!/usr/bin/env python3
# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
"""
Project:
glideinwms
Purpose:
unit test for glideinwms/factory/glideFactoryDowntimeLib.py
Author:
Doug Strain <[email protected]>
"""
import os
import shutil
import sys
import tarfile
import tempfile
import time
import unittest
import xmlrunner
from glideinwms.factory import glideFactoryDowntimeLib
from glideinwms.lib import condorMonitor, logSupport
# unittest_utils will handle putting the appropriate directories on the python
# path for us.
from glideinwms.unittests.unittest_utils import create_random_string, create_temp_file, FakeLogger, runTest
# from glideinwms.frontend.glideinFrontendInterface import Credential
class TestDowntimes(unittest.TestCase):
"""
Test the downtimes library
"""
    def setUp(self):
self.file_loc = "/tmp/downtimes.txt"
self.downtime = glideFactoryDowntimeLib.DowntimeFile(self.file_loc)
def tearDown(self):
os.remove(self.file_loc)
def test_downtimesfile(self):
self.downtime.startDowntime(entry="All", comment="unittest downtime", create_if_empty=True)
self.assertTrue(self.downtime.checkDowntime(entry="All", check_time=None))
self.assertTrue(self.downtime.checkDowntime(entry="James", check_time=None))
# Test downtime comments
self.assertEqual(self.downtime.downtime_comment, "unittest downtime")
self.downtime.endDowntime(entry="All", comment="end unittest downtime")
# Use now+1 since we just ended the downtime
# The second counter may not have updated
now = int(time.time())
self.assertFalse(self.downtime.checkDowntime(entry="All", check_time=now + 1))
def test_setperiodwithendtime(self):
now = int(time.time())
self.downtime.startDowntime(
start_time=now - 60,
end_time=now + 3600,
entry="All",
frontend="All",
security_class="All",
comment="unittest downtime",
create_if_empty=True,
)
self.assertTrue(self.downtime.checkDowntime(entry="All", check_time=None))
self.assertTrue(self.downtime.checkDowntime(entry="James", check_time=None))
self.downtime.endDowntime(entry="All", comment="end unittest downtime")
# Make sure that is after the last downtime command
now = int(time.time())
self.assertFalse(self.downtime.checkDowntime(entry="All", check_time=now + 1))
def test_entryonlydowntime(self):
now = int(time.time())
self.downtime.startDowntime(
start_time=now - 60,
end_time=now + 3600,
entry="DougEntry",
frontend="All",
security_class="All",
comment="unittest downtime",
create_if_empty=True,
)
self.assertFalse(self.downtime.checkDowntime(entry="All", check_time=None))
self.assertFalse(self.downtime.checkDowntime(entry="James", check_time=None))
self.assertTrue(self.downtime.checkDowntime(entry="DougEntry", check_time=None))
self.downtime.endDowntime(entry="All", comment="end unittest downtime")
# Make sure that is after the last downtime command
now = int(time.time())
self.assertFalse(self.downtime.checkDowntime(entry="All", check_time=now + 1))
self.assertFalse(self.downtime.checkDowntime(entry="DougEntry", check_time=now + 1))
def test_setdelayedperiod(self):
now = int(time.time())
self.downtime.startDowntime(
start_time=now + 7200,
end_time=now + 10800,
entry="All",
frontend="All",
security_class="All",
comment="unittest delayed downtime",
create_if_empty=True,
)
self.assertFalse(self.downtime.checkDowntime(entry="All", check_time=None))
self.assertTrue(self.downtime.checkDowntime(entry="All", check_time=now + 9600))
self.downtime.endDowntime(entry="All", comment="end unittest downtime")
# Make sure that is after the last downtime command
now2 = int(time.time())
self.assertFalse(self.downtime.checkDowntime(entry="All", check_time=now2 + 1))
# Relative to the initial time (must be now2 < now + 7200)
# Otherwise endDowntime() interrupts started downtime intervals
if now2 < now + 7200:
self.assertTrue(self.downtime.checkDowntime(entry="All", check_time=now + 9600))
def test_setfrontendsecclass(self):
now = int(time.time())
self.downtime.startDowntime(
start_time=now - 7200,
end_time=now + 10800,
entry="TestEntry",
frontend="SampleFrontend",
security_class="SecClass",
comment="unittest frontend secclass",
create_if_empty=True,
)
self.assertFalse(self.downtime.checkDowntime(entry="All", check_time=None))
self.assertFalse(self.downtime.checkDowntime(entry="factory", check_time=None))
self.assertFalse(self.downtime.checkDowntime(entry="TestEntry", check_time=None))
self.assertTrue(
self.downtime.checkDowntime(
entry="TestEntry", frontend="SampleFrontend", security_class="SecClass", check_time=now + 9600
)
)
self.assertFalse(
self.downtime.checkDowntime(
entry="TestEntry", frontend="OtherFrontend", security_class="SecClass", check_time=now + 9600
)
)
self.assertFalse(
self.downtime.checkDowntime(
entry="TestEntry", frontend="OtherFrontend", security_class="OtherClass", check_time=now + 9600
)
)
self.assertFalse(
self.downtime.checkDowntime(
entry="TestEntry", frontend="SampleFrontend", security_class="OtherClass", check_time=now + 9600
)
)
self.downtime.endDowntime(entry="All", comment="end unittest downtime")
# Test relative to initial time but must be in the future
now = max(int(time.time()) + 1, now + 9600)
self.assertFalse(
self.downtime.checkDowntime(
entry="TestEntry", frontend="SampleFrontend", security_class="SecClass", check_time=now
)
)
if __name__ == "__main__":
unittest.main(testRunner=xmlrunner.XMLTestRunner(output="unittests-reports")) | null |
1,116 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdklive.endpoint import endpoint_data
class AddShowIntoShowListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'live', '2016-11-01', 'AddShowIntoShowList','live')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_showLists(self): # RepeatList
return self.get_query_params().get('showList')
def set_showLists(self, showList): # RepeatList
for depth1 in range(len(showList)):
if showList[depth1].get('showName') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.showName', showList[depth1].get('showName'))
if showList[depth1].get('repeatTimes') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.repeatTimes', showList[depth1].get('repeatTimes'))
if showList[depth1].get('resourceType') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.resourceType', showList[depth1].get('resourceType'))
if showList[depth1].get('resourceUrl') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.resourceUrl', showList[depth1].get('resourceUrl'))
if showList[depth1].get('liveInputType') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.liveInputType', showList[depth1].get('liveInputType'))
if showList[depth1].get('duration') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.duration', showList[depth1].get('duration'))
if showList[depth1].get('resourceId') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.resourceId', showList[depth1].get('resourceId'))
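    # e.g. (hypothetical input) showList=[{'showName': 'intro', 'duration': 30}]
    # flattens to query params 'showList.1.showName' = 'intro' and
    # 'showList.1.duration' = 30, mirroring the loop in set_showLists above.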
    def get_LiveInputType(self): # Integer
return self.get_query_params().get('LiveInputType')
def set_LiveInputType(self, LiveInputType): # Integer
self.add_query_param('LiveInputType', LiveInputType)
def get_isBatchMode(self): # Boolean
return self.get_query_params().get('isBatchMode')
def set_isBatchMode(self, isBatchMode): # Boolean
self.add_query_param('isBatchMode', isBatchMode)
def get_Duration(self): # Long
return self.get_query_params().get('Duration')
def set_Duration(self, Duration): # Long
self.add_query_param('Duration', Duration)
def get_RepeatTimes(self): # Integer
return self.get_query_params().get('RepeatTimes')
def set_RepeatTimes(self, RepeatTimes): # Integer
self.add_query_param('RepeatTimes', RepeatTimes)
def get_ShowName(self): # String
return self.get_query_params().get('ShowName')
def set_ShowName(self, ShowName): # String
self.add_query_param('ShowName', ShowName)
def get_ResourceId(self): # String
return self.get_query_params().get('ResourceId')
def set_ResourceId(self, ResourceId): # String
self.add_query_param('ResourceId', ResourceId)
def get_CasterId(self): # String
return self.get_query_params().get('CasterId')
def set_CasterId(self, CasterId): # String
self.add_query_param('CasterId', CasterId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ResourceType(self): # String
return self.get_query_params().get('ResourceType')
def set_ResourceType(self, ResourceType): # String
self.add_query_param('ResourceType', ResourceType)
def get_ResourceUrl(self): # String
return self.get_query_params().get('ResourceUrl')
def set_ResourceUrl(self, ResourceUrl): # String
self.add_query_param('ResourceUrl', ResourceUrl)
def get_Spot(self): # Integer
return self.get_query_params().get('Spot')
def set_Spot(self, Spot): # Integer
self.add_query_param('Spot', Spot) | null |
1,117 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdklinkedmall.endpoint import endpoint_data
class CreateOrderRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'linkedmall', '2018-01-16', 'CreateOrder','linkedmall')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Quantity(self):
return self.get_query_params().get('Quantity')
def set_Quantity(self,Quantity):
self.add_query_param('Quantity',Quantity)
    def get_BizUid(self):
return self.get_query_params().get('BizUid')
def set_BizUid(self,BizUid):
self.add_query_param('BizUid',BizUid)
def get_ExtJson(self):
return self.get_query_params().get('ExtJson')
def set_ExtJson(self,ExtJson):
self.add_query_param('ExtJson',ExtJson)
def get_AccountType(self):
return self.get_query_params().get('AccountType')
def set_AccountType(self,AccountType):
self.add_query_param('AccountType',AccountType)
def get_UseAnonymousTbAccount(self):
return self.get_query_params().get('UseAnonymousTbAccount')
def set_UseAnonymousTbAccount(self,UseAnonymousTbAccount):
self.add_query_param('UseAnonymousTbAccount',UseAnonymousTbAccount)
def get_OrderExpireTime(self):
return self.get_query_params().get('OrderExpireTime')
def set_OrderExpireTime(self,OrderExpireTime):
self.add_query_param('OrderExpireTime',OrderExpireTime)
def get_LmItemId(self):
return self.get_query_params().get('LmItemId')
def set_LmItemId(self,LmItemId):
self.add_query_param('LmItemId',LmItemId)
def get_ItemLists(self):
return self.get_query_params().get('ItemLists')
def set_ItemLists(self,ItemLists):
for i in range(len(ItemLists)):
if ItemLists[i].get('ItemId') is not None:
self.add_query_param('ItemList.' + str(i + 1) + '.ItemId' , ItemLists[i].get('ItemId'))
if ItemLists[i].get('Quantity') is not None:
self.add_query_param('ItemList.' + str(i + 1) + '.Quantity' , ItemLists[i].get('Quantity'))
if ItemLists[i].get('LmItemId') is not None:
self.add_query_param('ItemList.' + str(i + 1) + '.LmItemId' , ItemLists[i].get('LmItemId'))
if ItemLists[i].get('SkuId') is not None:
self.add_query_param('ItemList.' + str(i + 1) + '.SkuId' , ItemLists[i].get('SkuId'))
def get_ItemId(self):
return self.get_query_params().get('ItemId')
def set_ItemId(self,ItemId):
self.add_query_param('ItemId',ItemId)
def get_TotalAmount(self):
return self.get_query_params().get('TotalAmount')
def set_TotalAmount(self,TotalAmount):
self.add_query_param('TotalAmount',TotalAmount)
def get_ThirdPartyUserId(self):
return self.get_query_params().get('ThirdPartyUserId')
def set_ThirdPartyUserId(self,ThirdPartyUserId):
self.add_query_param('ThirdPartyUserId',ThirdPartyUserId)
def get_BizId(self):
return self.get_query_params().get('BizId')
def set_BizId(self,BizId):
self.add_query_param('BizId',BizId)
def get_OutTradeId(self):
return self.get_query_params().get('OutTradeId')
def set_OutTradeId(self,OutTradeId):
self.add_query_param('OutTradeId',OutTradeId)
def get_DeliveryAddress(self):
return self.get_query_params().get('DeliveryAddress')
def set_DeliveryAddress(self,DeliveryAddress):
        self.add_query_param('DeliveryAddress',DeliveryAddress)
1,118 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""Common text strings"""
def direction_in():
"""Log line text indicating received by router"""
return "<-"
def direction_out():
"""Log line text indicating transmitted by router"""
return "->"
def lozenge():
"""
:return: HTML document lozenge character
"""
return "◊"
def nbsp():
"""
:return: HTML Non-breaking space
"""
return " "
# Large text strings used by main that change infrequently
# html head, start body
def web_page_head():
return """<!DOCTYPE html>
<html>
<head>
<title>Adverbl Analysis - qpid-dispatch router logs</title>
<style>
* {
font-family: sans-serif;
}
table {
border-collapse: collapse;
}
table, td, th {
border: 1px solid black;
padding: 3px;
}
</style>
<script src="http://ajax.googleapis.com/ajax/libs/dojo/1.4/dojo/dojo.xd.js" type="text/javascript"></script>
<!-- <script src="http://ajax.googleapis.com/ajax/libs/dojo/1.4/dojo/dojo.xd.js" type="text/javascript"></script> -->
<script type="text/javascript">
function node_is_visible(node)
{
if(dojo.isString(node))
node = dojo.byId(node);
if(!node)
return false;
return node.style.display == "block";
}
function set_node(node, str)
{
if(dojo.isString(node))
node = dojo.byId(node);
if(!node) return;
node.style.display = str;
}
function toggle_node(node)
{
if(dojo.isString(node))
node = dojo.byId(node);
if(!node) return;
set_node(node, (node_is_visible(node)) ? 'none' : 'block');
}
function hide_node(node)
{
set_node(node, 'none');
}
function show_node(node)
{
set_node(node, 'block');
}
function go_back()
{
window.history.back();
}
"""
def web_page_toc():
return """
<h3>Contents</h3>
<table>
<tr> <th>Section</th> <th>Description</th> </tr>
<tr><td><a href=\"#c_logfiles\" >Log files</a></td> <td>Router and log file info</td></tr>
<tr><td><a href=\"#c_rtrinstances\" >Router Instances</a></td> <td>Router reboot chronology</td></tr>
<tr><td><a href=\"#c_connections\" >Connections</a></td> <td>Connection overview; per connection log data view control</td></tr>
<tr><td><a href=\"#c_addresses\" >Addresses</a></td> <td>AMQP address usage</td></tr>
<tr><td><a href=\"#c_connectchrono\" >Connection Chronology</a></td> <td>Router restart and connection chronology</td></tr>
<tr><td><a href=\"#c_conndetails\" >Connection Details</a></td> <td>Connection details; frames sorted by link</td></tr>
<tr><td><a href=\"#c_noteworthy\" >Noteworthy log lines</a></td> <td>AMQP errors and interesting flags</td></tr>
<tr><td><a href=\"#c_logdata\" >Log data</a></td> <td>Main AMQP traffic table</td></tr>
<tr><td><a href=\"#c_messageprogress\">Message progress</a></td> <td>Tracking messages through the system</td></tr>
<tr><td><a href=\"#c_linkprogress\" >Link name propagation</a></td> <td>Tracking link names</td></tr>
<tr><td><a href=\"#c_rtrdump\" >Router name index</a></td> <td>Short vs. long router container names</td></tr>
<tr><td><a href=\"#c_peerdump\" >Peer name index</a></td> <td>Short vs. long peer names</td></tr>
<tr><td><a href=\"#c_linkdump\" >Link name index</a></td> <td>Short vs. long link names</td></tr>
<tr><td><a href=\"#c_msgdump\" >Transfer name index</a></td> <td>Short names representing transfer data</td></tr>
<tr><td><a href=\"#c_ls\" >Router link state</a></td> <td>Link state analysis</td></tr>
<tr><td><a href=\"#c_sequence\" >Sequence diagram data</a></td> <td>Input data for seq-diag-gen.py utility</td></tr>
</table>
<hr>
"""
if __name__ == "__main__":
pass | null |
1,119 | """
Accessible models can be read and copied but not modified or deleted.
Owned models can be modified and deleted.
"""
from typing import (
Any,
Optional,
Type,
TYPE_CHECKING,
)
from galaxy import (
exceptions,
model,
)
if TYPE_CHECKING:
from sqlalchemy.orm import Query
class AccessibleManagerMixin:
"""
    A security interface to check if a User can read/view an item.
This can also be thought of as 'read but not modify' privileges.
"""
# declare what we are using from base ModelManager
model_class: Type[Any]
def by_id(self, id: int):
...
# don't want to override by_id since consumers will also want to fetch w/o any security checks
def is_accessible(self, item: "Query", user: model.User, **kwargs: Any) -> bool:
"""
        Return True if the item is accessible to the user.
"""
# override in subclasses
raise exceptions.NotImplemented("Abstract interface Method")
def get_accessible(self, id: int, user: model.User, **kwargs: Any) -> "Query":
"""
Return the item with the given id if it's accessible to user,
otherwise raise an error.
:raises exceptions.ItemAccessibilityException:
"""
item = self.by_id(id)
return self.error_unless_accessible(item, user, **kwargs)
def error_unless_accessible(self, item: "Query", user, **kwargs):
"""
Raise an error if the item is NOT accessible to user, otherwise return the item.
:raises exceptions.ItemAccessibilityException:
"""
if self.is_accessible(item, user, **kwargs):
return item
raise exceptions.ItemAccessibilityException(f"{self.model_class.__name__} is not accessible by user")
# TODO:?? are these even useful?
def list_accessible(self, user, **kwargs):
"""
Return a list of items accessible to the user, raising an error if ANY
are inaccessible.
:raises exceptions.ItemAccessibilityException:
"""
raise exceptions.NotImplemented("Abstract interface Method")
# NOTE: this will be a large, inefficient list if filters are not passed in kwargs
# items = ModelManager.list( self, trans, **kwargs )
# return [ self.error_unless_accessible( trans, item, user ) for item in items ]
    def filter_accessible(self, user, **kwargs):
"""
Return a list of items accessible to the user.
"""
raise exceptions.NotImplemented("Abstract interface Method")
# NOTE: this will be a large, inefficient list if filters are not passed in kwargs
# items = ModelManager.list( self, trans, **kwargs )
# return filter( lambda item: self.is_accessible( trans, item, user ), items )
class OwnableManagerMixin:
"""
A security interface to check if a User is an item's owner.
Some resources are associated with the User that created or imported them
and these Users can be considered the models' owner.
This can also be thought of as write/edit privileges.
"""
# declare what we are using from base ModelManager
model_class: Type[Any]
def by_id(self, id: int):
...
def is_owner(self, item: model.Base, user: Optional[model.User], **kwargs: Any) -> bool:
"""
Return True if user owns the item.
"""
# override in subclasses
raise exceptions.NotImplemented("Abstract interface Method")
def get_owned(self, id: int, user: Optional[model.User], **kwargs: Any) -> Any:
"""
Return the item with the given id if owned by the user,
otherwise raise an error.
:raises exceptions.ItemOwnershipException:
"""
item = self.by_id(id)
return self.error_unless_owner(item, user, **kwargs)
def error_unless_owner(self, item, user: Optional[model.User], **kwargs: Any):
"""
Raise an error if the item is NOT owned by user, otherwise return the item.
:raises exceptions.ItemAccessibilityException:
"""
if self.is_owner(item, user, **kwargs):
return item
raise exceptions.ItemOwnershipException(f"{self.model_class.__name__} is not owned by user")
def list_owned(self, user, **kwargs):
"""
Return a list of items owned by the user, raising an error if ANY
are not.
:raises exceptions.ItemAccessibilityException:
"""
raise exceptions.NotImplemented("Abstract interface Method")
# just alias to by_user (easier/same thing)
# return self.by_user( trans, user, **kwargs )
def filter_owned(self, user, **kwargs):
"""
Return a list of items owned by the user.
"""
# just alias to list_owned
return self.list_owned(user, **kwargs)
def get_mutable(self, id: int, user: Optional[model.User], **kwargs: Any) -> Any:
"""
Return the item with the given id if the user can mutate it,
otherwise raise an error. The user must be the owner of the item.
:raises exceptions.ItemOwnershipException:
"""
item = self.get_owned(id, user, **kwargs)
self.error_unless_mutable(item)
return item
def error_unless_mutable(self, item):
"""
Raise an error if the item is NOT mutable.
Items purged or archived are considered immutable.
:raises exceptions.ItemImmutableException:
"""
if getattr(item, "purged", False) or getattr(item, "archived", False):
raise exceptions.ItemImmutableException(f"{self.model_class.__name__} is immutable") | null |
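# A minimal subclass sketch (PageManager and model.Page are hypothetical;
# assumes items carry a `user` attribute identifying their owner):
#
#     class PageManager(OwnableManagerMixin):
#         model_class = model.Page
#
#         def is_owner(self, item, user, **kwargs):
#             return user is not None and item.user == user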
1,120 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksmartag.endpoint import endpoint_data
class ModifyFlowLogAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Smartag', '2018-03-13', 'ModifyFlowLogAttribute','smartag')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_NetflowVersion(self): # String
return self.get_query_params().get('NetflowVersion')
def set_NetflowVersion(self, NetflowVersion): # String
self.add_query_param('NetflowVersion', NetflowVersion)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_InactiveAging(self): # Integer
return self.get_query_params().get('InactiveAging')
def set_InactiveAging(self, InactiveAging): # Integer
self.add_query_param('InactiveAging', InactiveAging)
    def get_SlsRegionId(self): # String
return self.get_query_params().get('SlsRegionId')
def set_SlsRegionId(self, SlsRegionId): # String
self.add_query_param('SlsRegionId', SlsRegionId)
def get_ActiveAging(self): # Integer
return self.get_query_params().get('ActiveAging')
def set_ActiveAging(self, ActiveAging): # Integer
self.add_query_param('ActiveAging', ActiveAging)
def get_OutputType(self): # String
return self.get_query_params().get('OutputType')
def set_OutputType(self, OutputType): # String
self.add_query_param('OutputType', OutputType)
def get_ProjectName(self): # String
return self.get_query_params().get('ProjectName')
def set_ProjectName(self, ProjectName): # String
self.add_query_param('ProjectName', ProjectName)
def get_LogstoreName(self): # String
return self.get_query_params().get('LogstoreName')
def set_LogstoreName(self, LogstoreName): # String
self.add_query_param('LogstoreName', LogstoreName)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_NetflowServerPort(self): # Integer
return self.get_query_params().get('NetflowServerPort')
def set_NetflowServerPort(self, NetflowServerPort): # Integer
self.add_query_param('NetflowServerPort', NetflowServerPort)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_NetflowServerIp(self): # String
return self.get_query_params().get('NetflowServerIp')
def set_NetflowServerIp(self, NetflowServerIp): # String
self.add_query_param('NetflowServerIp', NetflowServerIp)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_FlowLogId(self): # String
return self.get_query_params().get('FlowLogId')
def set_FlowLogId(self, FlowLogId): # String
self.add_query_param('FlowLogId', FlowLogId) | null |
1,121 | """NNCF Task of OTX Detection."""
# Copyright (C) 2022 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
from functools import partial
from typing import Optional
import otx.algorithms.detection.adapters.mmdet.nncf.patches # noqa: F401 # pylint: disable=unused-import
from otx.algorithms.common.tasks.nncf_task import NNCFBaseTask
from otx.algorithms.common.utils.logger import get_logger
from otx.algorithms.detection.adapters.mmdet.nncf import build_nncf_detector
from otx.algorithms.detection.adapters.mmdet.task import MMDetectionTask
from otx.algorithms.detection.adapters.mmdet.utils.config_utils import (
should_cluster_anchors,
)
from otx.api.entities.datasets import DatasetEntity
from otx.api.entities.inference_parameters import InferenceParameters
from otx.api.entities.model import ModelEntity
from otx.api.entities.optimization_parameters import OptimizationParameters
from otx.api.entities.resultset import ResultSetEntity
from otx.api.entities.subset import Subset
from otx.api.entities.task_environment import TaskEnvironment
from otx.api.usecases.evaluation.metrics_helper import MetricsHelper
logger = get_logger()
# pylint: disable=too-many-ancestors
class DetectionNNCFTask(NNCFBaseTask, MMDetectionTask):
"""DetectionNNCFTask."""
def __init__(self, task_environment: TaskEnvironment, output_path: Optional[str] = None):
super().__init__()
super(NNCFBaseTask, self).__init__(task_environment, output_path)
self._set_attributes_by_hyperparams()
def _init_task(self, dataset: Optional[DatasetEntity] = None, export: bool = False):
super(NNCFBaseTask, self)._init_task(dataset, export)
self._prepare_optimize(export)
def _prepare_optimize(self, export=False):
super()._prepare_optimize()
self.model_builder = partial(
self.model_builder,
nncf_model_builder=build_nncf_detector,
return_compression_ctrl=False,
is_export=export,
)
def _optimize(
self,
dataset: DatasetEntity,
optimization_parameters: Optional[OptimizationParameters] = None,
):
results = self._train_model(dataset)
return results
def _optimize_post_hook(
self,
dataset: DatasetEntity,
output_model: ModelEntity,
):
# get prediction on validation set
val_dataset = dataset.get_subset(Subset.VALIDATION)
val_preds, val_map = self._infer_model(val_dataset, InferenceParameters(is_evaluation=True))
preds_val_dataset = val_dataset.with_empty_annotations()
self._add_predictions_to_dataset(val_preds, preds_val_dataset, 0.0)
result_set = ResultSetEntity(
model=output_model,
ground_truth_dataset=val_dataset,
prediction_dataset=preds_val_dataset,
)
# adjust confidence threshold
if self._hyperparams.postprocessing.result_based_confidence_threshold:
best_confidence_threshold = None
logger.info("Adjusting the confidence threshold")
metric = MetricsHelper.compute_f_measure(result_set, vary_confidence_threshold=True)
if metric.best_confidence_threshold:
best_confidence_threshold = metric.best_confidence_threshold.value
if best_confidence_threshold is None:
raise ValueError("Cannot compute metrics: Invalid confidence threshold!")
logger.info(f"Setting confidence threshold to {best_confidence_threshold} based on results")
self.confidence_threshold = best_confidence_threshold
else:
metric = MetricsHelper.compute_f_measure(result_set, vary_confidence_threshold=False)
performance = metric.get_performance()
logger.info(f"Final model performance: {str(performance)}")
performance.dashboard_metrics.extend(
# pylint: disable-next=protected-access
self._generate_training_metrics(self._learning_curves, val_map)
)
output_model.performance = performance
    def _save_model_post_hook(self, modelinfo):
if self._recipe_cfg is not None and should_cluster_anchors(self._recipe_cfg):
modelinfo["anchors"] = {}
self._update_anchors(modelinfo["anchors"], self.config.model.bbox_head.anchor_generator)
modelinfo["confidence_threshold"] = self.confidence_threshold | null |
1,122 | from rest_framework import serializers as ser
from rest_framework import exceptions
from osf.exceptions import ValidationError
from osf.models import ApiOAuth2PersonalToken, ApiOAuth2Scope
from api.base.exceptions import format_validation_error
from api.base.serializers import JSONAPISerializer, LinksField, IDField, TypeField, RelationshipField, StrictVersion
from api.scopes.serializers import SCOPES_RELATIONSHIP_VERSION
class ApiOAuth2PersonalTokenSerializer(JSONAPISerializer):
"""Serialize data about a registered personal access token"""
def __init__(self, *args, **kwargs):
super(ApiOAuth2PersonalTokenSerializer, self).__init__(*args, **kwargs)
request = kwargs['context']['request']
# Dynamically adding scopes field here, depending on the version
if expect_scopes_as_relationships(request):
field = RelationshipField(
related_view='tokens:token-scopes-list',
related_view_kwargs={'_id': '<_id>'},
always_embed=True,
read_only=False,
)
self.fields['scopes'] = field
self.fields['owner'] = RelationshipField(
related_view='users:user-detail',
related_view_kwargs={'user_id': '<owner._id>'},
)
# Making scopes embeddable
self.context['embed']['scopes'] = self.context['view']._get_embed_partial('scopes', field)
else:
self.fields['scopes'] = ser.SerializerMethodField()
self.fields['owner'] = ser.SerializerMethodField()
id = IDField(source='_id', read_only=True, help_text='The object ID for this token (automatically generated)')
type = TypeField()
name = ser.CharField(
help_text='A short, descriptive name for this token',
required=True,
)
token_id = ser.CharField(read_only=True, allow_blank=True)
class Meta:
type_ = 'tokens'
links = LinksField({
'html': 'absolute_url',
})
def get_owner(self, obj):
return obj.owner._id
    def get_scopes(self, obj):
return ' '.join([scope.name for scope in obj.scopes.all()])
def absolute_url(self, obj):
return obj.absolute_url
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def to_representation(self, obj, envelope='data'):
data = super(ApiOAuth2PersonalTokenSerializer, self).to_representation(obj, envelope=envelope)
# Make sure users only see token_id on create
if not self.context['request'].method == 'POST':
if 'data' in data:
data['data']['attributes'].pop('token_id')
else:
data['attributes'].pop('token_id')
return data
def create(self, validated_data):
scopes = validate_requested_scopes(validated_data.pop('scopes', None))
if not scopes:
raise exceptions.ValidationError('Cannot create a token without scopes.')
instance = ApiOAuth2PersonalToken(**validated_data)
try:
instance.save()
except ValidationError as e:
detail = format_validation_error(e)
raise exceptions.ValidationError(detail=detail)
for scope in scopes:
instance.scopes.add(scope)
return instance
def update(self, instance, validated_data):
scopes = validate_requested_scopes(validated_data.pop('scopes', None))
assert isinstance(instance, ApiOAuth2PersonalToken), 'instance must be an ApiOAuth2PersonalToken'
instance.deactivate(save=False) # This will cause CAS to revoke the existing token but still allow it to be used in the future, new scopes will be updated properly at that time.
instance.reload()
for attr, value in validated_data.items():
if attr == 'token_id': # Do not allow user to update token_id
continue
else:
setattr(instance, attr, value)
if scopes:
update_scopes(instance, scopes)
try:
instance.save()
except ValidationError as e:
detail = format_validation_error(e)
raise exceptions.ValidationError(detail=detail)
return instance
class ApiOAuth2PersonalTokenWritableSerializer(ApiOAuth2PersonalTokenSerializer):
def __init__(self, *args, **kwargs):
super(ApiOAuth2PersonalTokenWritableSerializer, self).__init__(*args, **kwargs)
request = kwargs['context']['request']
# Dynamically overriding scopes field for early versions to make scopes writable via an attribute
if not expect_scopes_as_relationships(request):
self.fields['scopes'] = ser.CharField(write_only=True, required=False)
def to_representation(self, obj, envelope='data'):
"""
Overriding to_representation allows using different serializers for the request and response.
This will allow scopes to be a serializer method field if an early version, or a relationship field for a later version
"""
context = self.context
return ApiOAuth2PersonalTokenSerializer(instance=obj, context=context).data
def expect_scopes_as_relationships(request):
"""Whether serializer should expect scopes to be a relationship instead of an attribute
Scopes were previously an attribute on the serializer to mirror that they were a CharField on the model.
Now that scopes are an m2m field with tokens, later versions of the serializer represent scopes as relationships.
"""
return StrictVersion(getattr(request, 'version', '2.0')) >= StrictVersion(SCOPES_RELATIONSHIP_VERSION)
def update_scopes(token, scopes):
to_remove = token.scopes.difference(scopes)
to_add = scopes.difference(token.scopes.all())
for scope in to_remove:
token.scopes.remove(scope)
for scope in to_add:
token.scopes.add(scope)
return
def validate_requested_scopes(data):
if not data:
return []
    if not isinstance(data, list):
data = data.split(' ')
scopes = ApiOAuth2Scope.objects.filter(name__in=data)
if len(scopes) != len(data):
raise exceptions.NotFound('Scope names must be one of: {}.'.format(
', '.join(ApiOAuth2Scope.objects.values_list('name', flat=True)),
))
if scopes.filter(is_public=False):
raise exceptions.ValidationError('User requested invalid scope.')
return scopes | null |
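# --- Editor's illustrative sketch (not part of the original module) ---
# validate_requested_scopes accepts either a list or a space-delimited string,
# mirroring the OAuth2 scope wire format. Hypothetical usage, assuming public
# ApiOAuth2Scope rows named 'osf.full_read' and 'osf.full_write' exist:
#
#     validate_requested_scopes('osf.full_read osf.full_write')
#     # -> QuerySet of the two matching ApiOAuth2Scope objects
#     validate_requested_scopes(None)
#     # -> []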
1,123 | # Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
import errno
import os
import uuid
from contextlib import contextmanager
from errno import EACCES, ENOENT, EPERM, EROFS
from os.path import isfile, join, lexists
from shutil import rmtree
from stat import (
S_IRGRP,
S_IROTH,
S_IRUSR,
S_IRWXG,
S_IRWXO,
S_IRWXU,
S_IXGRP,
S_IXOTH,
S_IXUSR,
)
from tempfile import gettempdir
from unittest.mock import patch
import pytest
from conda.gateways.disk.update import touch
def create_temp_location():
tempdirdir = gettempdir()
dirname = str(uuid.uuid4())[:8]
return join(tempdirdir, dirname)
@contextmanager
def tempdir():
prefix = create_temp_location()
try:
os.makedirs(prefix)
yield prefix
finally:
if lexists(prefix):
rmtree(prefix, ignore_errors=False, onerror=_remove_read_only)
def _remove_read_only(func, path, exc):
excvalue = exc[1]
if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES:
os.chmod(path, S_IRWXU | S_IRWXG | S_IRWXO)
func(path)
else:
pass
def _make_read_only(path):
os.chmod(path, S_IRUSR | S_IRGRP | S_IROTH)
def _can_write_file(test, content):
    try:
        with open(test, "w+") as fh:
            fh.write(content)
        if os.stat(test).st_size == 0:
            return False
        else:
            return True
    except Exception as e:
        eno = getattr(e, "errno", None)
        if eno == EACCES:
            return False
def _try_open(path):
    # open() itself raises IOError/OSError when the file is unreadable
    f = open(path, "a+")
    f.close()
def _can_execute(path):
return bool(os.stat(path).st_mode & (S_IXUSR | S_IXGRP | S_IXOTH))
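# Editor's aside (illustrative, not original test code): _can_execute masks the
# owner/group/other execute bits. On POSIX, a file created by mkstemp (mode
# 0o600) is not executable until chmod adds an execute bit:
#
#     >>> import os, stat, tempfile
#     >>> fd, p = tempfile.mkstemp(); os.close(fd)
#     >>> bool(os.stat(p).st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
#     False
#     >>> os.chmod(p, 0o755)
#     >>> bool(os.stat(p).st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
#     True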
def test_make_writable():
from conda.gateways.disk.permissions import make_writable
with tempdir() as td:
test_path = join(td, "test_path")
touch(test_path)
assert isfile(test_path)
_try_open(test_path)
_make_read_only(test_path)
pytest.raises((IOError, OSError), _try_open, test_path)
make_writable(test_path)
_try_open(test_path)
assert _can_write_file(test_path, "welcome to the ministry of silly walks")
os.remove(test_path)
assert not isfile(test_path)
def test_make_writable_doesnt_exist():
from conda.gateways.disk.permissions import make_writable
with pytest.raises((IOError, OSError)) as exc:
make_writable(join("some", "path", "that", "definitely", "doesnt", "exist"))
assert exc.value.errno == ENOENT
def test_make_writable_dir_EPERM():
import conda.gateways.disk.permissions
from conda.gateways.disk.permissions import make_writable
with patch.object(conda.gateways.disk.permissions, "chmod") as chmod_mock:
chmod_mock.side_effect = IOError(EPERM, "some message", "foo")
with tempdir() as td:
assert not make_writable(td)
def METHOD_NAME():
import conda.gateways.disk.permissions
from conda.gateways.disk.permissions import make_writable
with patch.object(conda.gateways.disk.permissions, "chmod") as chmod_mock:
chmod_mock.side_effect = IOError(EACCES, "some message", "foo")
with tempdir() as td:
assert not make_writable(td)
def test_make_writable_dir_EROFS():
import conda.gateways.disk.permissions
from conda.gateways.disk.permissions import make_writable
with patch.object(conda.gateways.disk.permissions, "chmod") as chmod_mock:
chmod_mock.side_effect = IOError(EROFS, "some message", "foo")
with tempdir() as td:
assert not make_writable(td)
def test_recursive_make_writable():
from conda.gateways.disk.permissions import recursive_make_writable
with tempdir() as td:
test_path = join(td, "test_path")
touch(test_path)
assert isfile(test_path)
_try_open(test_path)
_make_read_only(test_path)
pytest.raises((IOError, OSError), _try_open, test_path)
recursive_make_writable(test_path)
_try_open(test_path)
assert _can_write_file(test_path, "welcome to the ministry of silly walks")
os.remove(test_path)
assert not isfile(test_path)
def test_make_executable():
from conda.gateways.disk.permissions import make_executable
with tempdir() as td:
test_path = join(td, "test_path")
touch(test_path)
assert isfile(test_path)
_try_open(test_path)
_make_read_only(test_path)
assert not _can_write_file(test_path, "welcome to the ministry of silly walks")
assert not _can_execute(test_path)
make_executable(test_path) | null |
1,124 | import mock
import pytest
from django.db import IntegrityError
from framework.auth import Auth
from osf.models import Collection
from osf.exceptions import NodeStateError
from website.views import find_bookmark_collection
from .factories import (
UserFactory,
ProjectFactory,
BookmarkCollectionFactory,
CollectionFactory,
CollectionProviderFactory
)
from osf.utils.workflows import CollectionSubmissionStates
from website.mails import mails
from osf.models.collection_submission import mails as collection_submission_mail
pytestmark = pytest.mark.django_db
@pytest.fixture()
def user():
return UserFactory()
@pytest.fixture()
def auth(user):
return Auth(user)
# copied from tests/test_models.py
@pytest.mark.enable_bookmark_creation
class TestBookmarkCollection:
@pytest.fixture()
def collection(self, user):
return find_bookmark_collection(user)
def test_bookmark_collection_is_bookmark_collection(self, collection):
assert collection.is_bookmark_collection
assert isinstance(collection, Collection)
def test_cannot_remove_bookmark_collection(self, collection):
with pytest.raises(NodeStateError):
collection.delete()
def test_cannot_have_two_bookmark_collection(self, user, collection):
with pytest.raises(IntegrityError):
BookmarkCollectionFactory(creator=user)
def test_cannot_link_to_bookmark_collection(self, user, auth, collection):
new_node = ProjectFactory(creator=user)
with pytest.raises(ValueError):
new_node.add_pointer(collection, auth=auth)
def test_can_remove_empty_folder(self, user, auth):
new_folder = CollectionFactory(creator=user)
assert isinstance(new_folder, Collection)
new_folder.delete()
assert new_folder.deleted
def test_can_remove_root_folder_structure_without_cascading(self, user, auth):
outer_folder = CollectionFactory(creator=user)
assert isinstance(outer_folder, Collection)
inner_folder = CollectionFactory(creator=user)
assert isinstance(inner_folder, Collection)
outer_folder.collect_object(inner_folder, auth.user)
outer_folder.delete()
assert outer_folder.deleted
inner_folder.refresh_from_db()
assert not inner_folder.deleted
@pytest.mark.enable_bookmark_creation
class TestImplicitRemoval:
@pytest.fixture
def bookmark_collection(self, user):
return find_bookmark_collection(user)
@pytest.fixture
def user2(self):
return UserFactory()
@pytest.fixture
def alternate_bookmark_collection(self, user2):
return find_bookmark_collection(user2)
@pytest.fixture
def standard_collection(self):
return CollectionFactory()
@pytest.fixture
def collected_node(self, bookmark_collection, alternate_bookmark_collection, standard_collection):
node = ProjectFactory(creator=bookmark_collection.creator, is_public=True)
bookmark_collection.collect_object(node, bookmark_collection.creator)
alternate_bookmark_collection.collect_object(node, alternate_bookmark_collection.creator)
standard_collection.collect_object(node, standard_collection.creator)
return node
@pytest.fixture
def provider(self):
return CollectionProviderFactory()
@pytest.fixture
def provider_collection(self, provider):
return CollectionFactory(provider=provider)
@pytest.fixture
def METHOD_NAME(self, bookmark_collection, alternate_bookmark_collection, provider_collection):
node = ProjectFactory(creator=bookmark_collection.creator, is_public=True)
bookmark_collection.collect_object(node, bookmark_collection.creator)
alternate_bookmark_collection.collect_object(node, alternate_bookmark_collection.creator)
provider_collection.collect_object(node, provider_collection.creator)
return node
@mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits
def test_node_removed_from_collection_on_privacy_change(self, auth, collected_node, bookmark_collection):
associated_collections = collected_node.guids.first().collectionsubmission_set
assert associated_collections.count() == 3
collected_node.set_privacy('private', auth=auth)
assert associated_collections.filter(machine_state=CollectionSubmissionStates.REMOVED).count() == 2
assert associated_collections.exclude(machine_state=CollectionSubmissionStates.REMOVED).count() == 1
assert associated_collections.filter(collection=bookmark_collection).exists()
@mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits
def test_node_removed_from_collection_on_privacy_change_notify(self, auth, METHOD_NAME, bookmark_collection):
associated_collections = METHOD_NAME.guids.first().collectionsubmission_set
assert associated_collections.count() == 3
send_mail = mails.send_mail
with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send:
mock_send.side_effect = send_mail # implicitly test rendering
METHOD_NAME.set_privacy('private', auth=auth)
assert mock_send.called
assert len(mock_send.call_args_list) == 1
email1 = mock_send.call_args_list[0]
_, email1_kwargs = email1
assert {email1_kwargs['node'].id} == {METHOD_NAME.id}
expected_mail = mails.COLLECTION_SUBMISSION_REMOVED_PRIVATE(associated_collections.last().collection, METHOD_NAME)
assert {email1_kwargs['mail'].tpl_prefix} == {expected_mail.tpl_prefix}
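    # Editor's aside (illustrative): assigning the real function as the mock's
    # side_effect turns the mock into a call-through spy -- call metadata is
    # recorded while the original behavior (here, template rendering) still
    # runs. Minimal self-contained analogue:
    #
    #     import math
    #     from unittest import mock
    #     with mock.patch.object(math, 'sqrt', side_effect=math.sqrt) as m:
    #         assert math.sqrt(4) == 2.0 and m.called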
@mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits
def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, collected_node, bookmark_collection):
associated_collections = collected_node.guids.first().collectionsubmission_set
assert associated_collections.count() == 3
send_mail = mails.send_mail
with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send:
mock_send.side_effect = send_mail # implicitly test rendering
collected_node.set_privacy('private', auth=auth)
assert not mock_send.called
def test_node_removed_from_collection_on_delete(self, collected_node, bookmark_collection, auth):
associated_collections = collected_node.guids.first().collectionsubmission_set
assert associated_collections.filter(machine_state=CollectionSubmissionStates.ACCEPTED).count() == 3
collected_node.remove_node(auth)
assert associated_collections.filter(machine_state=CollectionSubmissionStates.REMOVED).count() == 3 | null |
1,125 | # Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This is an exact copy of
# https://github.com/huggingface/datasets/blob/3804442bb7cfcb9d52044d92688115cfdc69c2da/datasets/head_qa/head_qa.py
# with the exception of the `image` feature. This is to avoid adding `Pillow`
# as a dependency.
"""HEAD-QA: A Healthcare Dataset for Complex Reasoning."""
import json
import os
import datasets
_CITATION = """\
@inproceedings{vilares-gomez-rodriguez-2019-head,
title = "{HEAD}-{QA}: A Healthcare Dataset for Complex Reasoning",
author = "Vilares, David and
G{\'o}mez-Rodr{\'i}guez, Carlos",
booktitle = "Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics",
month = jul,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
url = "https://www.aclweb.org/anthology/P19-1092",
doi = "10.18653/v1/P19-1092",
pages = "960--966",
abstract = "We present HEAD-QA, a multi-choice question answering testbed to encourage research on complex reasoning. The questions come from exams to access a specialized position in the Spanish healthcare system, and are challenging even for highly specialized humans. We then consider monolingual (Spanish) and cross-lingual (to English) experiments with information retrieval and neural techniques. We show that: (i) HEAD-QA challenges current methods, and (ii) the results lag well behind human performance, demonstrating its usefulness as a benchmark for future work.",
}
"""
_DESCRIPTION = """\
HEAD-QA is a multi-choice HEAlthcare Dataset. The questions come from exams to access a specialized position in the
Spanish healthcare system, and are challenging even for highly specialized humans. They are designed by the Ministerio
de Sanidad, Consumo y Bienestar Social.
The dataset contains questions about the following topics: medicine, nursing, psychology, chemistry, pharmacology and biology.
"""
_HOMEPAGE = "https://aghie.github.io/head-qa/"
_LICENSE = "MIT License"
_URL = "https://drive.google.com/uc?export=download&confirm=t&id=1a_95N5zQQoUCq8IBNVZgziHbeM-QxG2t"
_DIRS = {"es": "HEAD", "en": "HEAD_EN"}
class HeadQA(datasets.GeneratorBasedBuilder):
"""HEAD-QA: A Healthcare Dataset for Complex Reasoning"""
VERSION = datasets.Version("1.1.0")
BUILDER_CONFIGS = [
datasets.BuilderConfig(
name="es", version=VERSION, description="Spanish HEAD dataset"
),
datasets.BuilderConfig(
name="en", version=VERSION, description="English HEAD dataset"
),
]
DEFAULT_CONFIG_NAME = "es"
def _info(self):
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=datasets.Features(
{
"name": datasets.Value("string"),
"year": datasets.Value("string"),
"category": datasets.Value("string"),
"qid": datasets.Value("int32"),
"qtext": datasets.Value("string"),
"ra": datasets.Value("int32"),
"answers": [
{
"aid": datasets.Value("int32"),
"atext": datasets.Value("string"),
}
],
}
),
supervised_keys=None,
homepage=_HOMEPAGE,
license=_LICENSE,
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
data_dir = dl_manager.download_and_extract(_URL)
        lang_dir = _DIRS[self.config.name]
        data_lang_dir = os.path.join(data_dir, lang_dir)
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={
"data_dir": data_dir,
"filepath": os.path.join(data_lang_dir, f"train_{dir}.json"),
},
),
datasets.SplitGenerator(
name=datasets.Split.TEST,
gen_kwargs={
"data_dir": data_dir,
"filepath": os.path.join(data_lang_dir, f"test_{dir}.json"),
},
),
datasets.SplitGenerator(
name=datasets.Split.VALIDATION,
gen_kwargs={
"data_dir": data_dir,
"filepath": os.path.join(data_lang_dir, f"dev_{dir}.json"),
},
),
]
def METHOD_NAME(self, data_dir, filepath):
"""Yields examples."""
with open(filepath, encoding="utf-8") as f:
head_qa = json.load(f)
for exam_id, exam in enumerate(head_qa["exams"]):
content = head_qa["exams"][exam]
name = content["name"].strip()
year = content["year"].strip()
category = content["category"].strip()
for question in content["data"]:
qid = int(question["qid"].strip())
qtext = question["qtext"].strip()
ra = int(question["ra"].strip())
aids = [answer["aid"] for answer in question["answers"]]
atexts = [answer["atext"].strip() for answer in question["answers"]]
answers = [
{"aid": aid, "atext": atext} for aid, atext in zip(aids, atexts)
]
id_ = f"{exam_id}_{qid}"
yield id_, {
"name": name,
"year": year,
"category": category,
"qid": qid,
"qtext": qtext,
"ra": ra,
"answers": answers,
} | null |
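# Editor's usage sketch (not part of the original script): assuming this file
# is saved locally as `head_qa.py`, the builder can be exercised through the
# standard `datasets` loading API:
#
#     from datasets import load_dataset
#     ds = load_dataset("./head_qa.py", "en", split="validation")
#     print(ds[0]["qtext"], ds[0]["answers"][0]["atext"])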
1,126 | #!/usr/bin/env python
# general imports
from numpy import *
from random import uniform
# imp general
import IMP
# our project
from IMP.isd import Scale, vonMisesKappaJeffreysRestraint
# unit testing framework
import IMP.test
class Tests(IMP.test.TestCase):
def setUp(self):
IMP.test.TestCase.setUp(self)
# IMP.set_log_level(IMP.MEMORY)
IMP.set_log_level(0)
self.m = IMP.Model()
self.kappa = Scale.setup_particle(IMP.Particle(self.m), 1.0)
self.DA = IMP.DerivativeAccumulator()
self.J = IMP.isd.vonMisesKappaJeffreysRestraint(self.m, self.kappa)
def METHOD_NAME(self):
"Test vonMisesKappaJeffreys probability"
try:
from scipy.special import i0, i1
except ImportError:
self.skipTest("this test requires the scipy Python module")
for i in range(100):
no = uniform(0.1, 100)
self.kappa.set_scale(no)
ratio = i1(no) / i0(no)
self.assertAlmostEqual(self.J.get_probability(),
sqrt(
ratio * (
no - ratio - no * ratio * ratio)),
delta=0.001)
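    # Editor's note (illustrative, not part of the original tests): the
    # expected value is the Jeffreys prior for the von Mises concentration,
    # with r = I1(kappa) / I0(kappa). A standalone sketch of the formula:
    #
    #     from scipy.special import i0, i1
    #     def jeffreys(kappa):
    #         r = i1(kappa) / i0(kappa)
    #         return (r * (kappa - r - kappa * r * r)) ** 0.5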
def testValueE(self):
"Test if vonMisesKappaJeffreys score is log(scale)"
try:
from scipy.special import i0, i1
except ImportError:
self.skipTest("this test requires the scipy Python module")
for i in range(100):
no = uniform(0.1, 100)
self.kappa.set_scale(no)
ratio = i1(no) / i0(no)
self.assertAlmostEqual(self.J.unprotected_evaluate(None),
-0.5 *
log(ratio *
(no - ratio - no * ratio * ratio)),
delta=0.001)
def testDerivative(self):
"Test the derivative of vonMisesKappaJeffreysRestraint"
try:
from scipy.special import i0, i1
except ImportError:
self.skipTest("this test requires the scipy Python module")
sf = IMP.core.RestraintsScoringFunction([self.J])
for i in range(100):
no = uniform(0.1, 100)
self.kappa.set_scale(no)
sf.evaluate(True)
ratio = i1(no) / i0(no)
self.assertAlmostEqual(self.kappa.get_scale_derivative(),
0.5 *
(-1 / ratio + 3 * ratio + 1 / no + 1 / (
no - no ** 2 / ratio + ratio * no ** 2)),
delta=0.001)
def test_get_inputs(self):
"Test vonMisesKappaJeffreysRestraint::get_inputs()"
self.assertEqual([x.get_name() for x in self.J.get_inputs()],
[self.kappa.get_name()])
def testNonzeroE(self):
"vonMisesKappaKappaJeffreys errors on evaluate with zero scale"
self.kappa.set_scale(0.0)
self.assertRaises(
IMP.ModelException,
self.J.unprotected_evaluate,
self.DA)
def testNegativeE(self):
"vonMisesKappaKappaJeffreys errors on evaluate with negative scale"
self.kappa.set_scale(-1.0)
self.assertRaises(
IMP.ModelException,
self.J.unprotected_evaluate,
self.DA)
def testNonzeroP(self):
"Test vonMisesKappaKappaJeffreys get_prob with zero scale"
self.kappa.set_scale(0.0)
self.assertRaises(IMP.ModelException, self.J.get_probability)
def testNegativeP(self):
"Test vonMisesKappaKappaJeffreys get_prob with negative scale"
self.kappa.set_scale(-1.0)
self.assertRaises(IMP.ModelException, self.J.get_probability)
def testSanityEP(self):
"Test if vonMisesKappaJeffreys score is -log(prob)"
for i in range(100):
no = uniform(0.1, 100)
self.kappa.set_scale(no)
self.assertAlmostEqual(self.J.unprotected_evaluate(self.DA),
-log(self.J.get_probability()))
def testSanityPE(self):
"Test if vonMisesKappaJeffreys prob is exp(-score)"
for i in range(100):
no = uniform(0.1, 100)
self.kappa.set_scale(no)
self.assertAlmostEqual(self.J.get_probability(),
exp(-self.J.unprotected_evaluate(self.DA)))
if __name__ == '__main__':
IMP.test.main() | null |
1,127 | # Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import annotations
import json
import uuid
from pathlib import Path
from typing import Iterable
import pytest
from pytest import MonkeyPatch
from conda.base.context import reset_context
from conda.common.io import env_vars
from conda.plugins.subcommands.doctor.health_checks import (
display_health_checks,
find_altered_packages,
find_packages_with_missing_files,
)
from conda.testing.integration import make_temp_env
@pytest.fixture
def env_ok(tmp_path: Path) -> Iterable[tuple[Path, str, str, str]]:
"""Fixture that returns a testing environment with no missing files"""
package = uuid.uuid4().hex
(tmp_path / "bin").mkdir(parents=True, exist_ok=True)
(tmp_path / "lib").mkdir(parents=True, exist_ok=True)
(tmp_path / "conda-meta").mkdir(parents=True, exist_ok=True)
bin_doctor = f"bin/{package}"
(tmp_path / bin_doctor).touch()
lib_doctor = f"lib/{package}.py"
(tmp_path / lib_doctor).touch()
# A template json file mimicking a json file in conda-meta
# the "sha256" and "sha256_in_prefix" values are sha256 checksum generated for an empty file
PACKAGE_JSON = {
"files": [
bin_doctor,
lib_doctor,
],
"paths_data": {
"paths": [
{
"_path": bin_doctor,
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"sha256_in_prefix": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
},
{
"_path": lib_doctor,
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"sha256_in_prefix": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
},
],
"paths_version": 1,
},
}
(tmp_path / "conda-meta" / f"{package}.json").write_text(json.dumps(PACKAGE_JSON))
yield tmp_path, bin_doctor, lib_doctor, package
@pytest.fixture
def env_missing_files(env_ok: tuple[Path, str, str, str]) -> tuple[Path, str, str, str]:
"""Fixture that returns a testing environment with missing files"""
prefix, bin_doctor, _, _ = env_ok
(prefix / bin_doctor).unlink() # file bin_doctor becomes "missing"
return env_ok
@pytest.fixture
def env_altered_files(env_ok: tuple[Path, str, str, str]) -> tuple[Path, str, str, str]:
"""Fixture that returns a testing environment with altered files"""
prefix, _, lib_doctor, _ = env_ok
    # Altering the lib_doctor.py file so that its sha256 checksum changes
with open(prefix / lib_doctor, "w") as f:
f.write("print('Hello, World!')")
return env_ok
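# Editor's aside (illustrative): the hard-coded digest in the fixtures is the
# well-known SHA-256 of empty input, so files created with touch() match it
# until their contents change:
#
#     import hashlib
#     assert hashlib.sha256(b"").hexdigest() == (
#         "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
#     )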
def test_no_missing_files(env_ok: tuple[Path, str, str, str]):
"""Test that runs for the case with no missing files"""
prefix, _, _, _ = env_ok
assert find_packages_with_missing_files(prefix) == {}
def test_missing_files(env_missing_files: tuple[Path, str, str, str]):
prefix, bin_doctor, _, package = env_missing_files
assert find_packages_with_missing_files(prefix) == {package: [bin_doctor]}
def test_no_altered_files(env_ok: tuple[Path, str, str, str]):
"""Test that runs for the case with no altered files"""
prefix, _, _, _ = env_ok
assert find_altered_packages(prefix) == {}
def METHOD_NAME(env_altered_files: tuple[Path, str, str, str]):
prefix, _, lib_doctor, package = env_altered_files
assert find_altered_packages(prefix) == {package: [lib_doctor]}
def test_json_keys_missing(env_ok: tuple[Path, str, str, str], capsys):
"""Test that runs for the case with empty json"""
prefix, _, _, package = env_ok
file = prefix / "conda-meta" / f"{package}.json"
with open(file) as f:
data = json.load(f)
del data["paths_data"]
with open(file, "w") as f:
json.dump(data, f)
assert find_altered_packages(prefix) == {}
def test_wrong_path_version(env_ok: tuple[Path, str, str, str]):
"""Test that runs for the case when path_version is not equal to 1"""
prefix, _, _, package = env_ok
file = prefix / "conda-meta" / f"{package}.json"
with open(file) as f:
data = json.load(f)
data["paths_data"]["paths_version"] = 2
with open(file, "w") as f:
json.dump(data, f)
assert find_altered_packages(prefix) == {}
def test_json_cannot_be_loaded(env_ok: tuple[Path, str, str, str]):
"""Test that runs for the case when json file is missing"""
prefix, _, _, package = env_ok
# passing a None type to json.loads() so that it fails
package = None
assert find_altered_packages(prefix) == {}
@pytest.mark.parametrize("verbose", [True, False])
def test_display_health_checks(
env_ok: tuple[Path, str, str, str], verbose: bool, capsys, monkeypatch: MonkeyPatch
):
"""Test that runs display_health_checks without missing or altered files."""
prefix, bin_doctor, lib_doctor, package = env_ok
monkeypatch.setenv("CONDA_PREFIX", str(prefix))
reset_context()
display_health_checks(prefix, verbose=verbose)
captured = capsys.readouterr()
assert "There are no packages with missing files." in captured.out
assert "There are no packages with altered files." in captured.out
@pytest.mark.parametrize("verbose", [True, False])
def test_display_health_checks_missing_files(
env_missing_files: tuple[Path, str, str, str],
verbose: bool,
capsys,
monkeypatch: MonkeyPatch,
):
"""Test that runs display_health_checks with missing files"""
prefix, bin_doctor, _, package = env_missing_files
monkeypatch.setenv("CONDA_PREFIX", str(prefix))
reset_context()
display_health_checks(prefix, verbose=verbose)
captured = capsys.readouterr()
if verbose:
assert str(bin_doctor) in captured.out
else:
assert f"{package}: 1" in captured.out
@pytest.mark.parametrize("verbose", [True, False])
def test_display_health_checks_altered_files(
env_altered_files: tuple[Path, str, str, str],
verbose: bool,
capsys,
monkeypatch: MonkeyPatch,
):
"""Test that runs display_health_checks with altered files"""
prefix, _, lib_doctor, package = env_altered_files
monkeypatch.setenv("CONDA_PREFIX", str(prefix))
reset_context()
display_health_checks(prefix, verbose=verbose)
captured = capsys.readouterr()
if verbose:
assert str(lib_doctor) in captured.out
else:
assert f"{package}: 1" in captured.out | null |
1,128 | from itertools import chain
from math import isnan
from numbers import Real, Integral
import numpy as np
from Orange.data import Value, Unknown, DiscreteVariable
__all__ = ["Instance"]
class Instance:
def __init__(self, domain, data=None, id=None):
"""
Construct a new data instance.
:param domain: domain that describes the instance's variables
:type domain: Orange.data.Domain
:param data: instance's values
:type data: Orange.data.Instance or a sequence of values
:param id: instance id
:type id: hashable value
"""
if data is None and isinstance(domain, Instance):
data = domain
domain = data.domain
self._domain = domain
if data is None:
self._x = np.repeat(Unknown, len(domain.attributes))
self._y = np.repeat(Unknown, len(domain.class_vars))
self._metas = np.array([var.Unknown for var in domain.metas],
dtype=object)
self._weight = 1
elif isinstance(data, Instance) and data.domain == domain:
self._x = np.array(data._x)
self._y = np.atleast_1d(np.array(data._y))
self._metas = np.array(data._metas)
self._weight = data._weight
else:
self._x, self._y, self._metas = domain.convert(data)
self._y = np.atleast_1d(self._y)
self._weight = 1
if id is not None:
self.id = id
else:
from Orange.data import Table
self.id = Table.new_id()
@property
def domain(self):
"""The domain describing the instance's values."""
return self._domain
@property
def METHOD_NAME(self):
"""
Instance's attributes as a 1-dimensional numpy array whose length
equals `len(self.domain.attributes)`.
"""
return self._x
@property
def y(self):
"""
        Instance's classes as a 1-dimensional numpy array whose length
        equals `len(self.domain.class_vars)`.
"""
return self._y
@property
def metas(self):
"""
        Instance's meta attributes as a 1-dimensional numpy array whose length
        equals `len(self.domain.metas)`.
"""
return self._metas
@property
def list(self):
"""
All instance's values, including attributes, classes and meta
attributes, as a list whose length equals `len(self.domain.attributes)
+ len(self.domain.class_vars) + len(self.domain.metas)`.
"""
n_self, n_metas = len(self), len(self._metas)
return [self[i].value if i < n_self else self[n_self - i - 1].value
for i in range(n_self + n_metas)]
@property
def weight(self):
"""The weight of the data instance. Default is 1."""
return self._weight
@weight.setter
def weight(self, weight):
self._weight = weight
def __setitem__(self, key, value):
if not isinstance(key, Integral):
key = self._domain.index(key)
value = self._domain[key].to_val(value)
if key >= 0 and not isinstance(value, (int, float)):
raise TypeError("Expected primitive value, got '%s'" %
type(value).__name__)
if 0 <= key < len(self._domain.attributes):
self._x[key] = value
elif len(self._domain.attributes) <= key:
self._y[key - len(self.domain.attributes)] = value
else:
self._metas[-1 - key] = value
def __getitem__(self, key):
idx = key if isinstance(key, Integral) else self._domain.index(key)
if 0 <= idx < len(self._domain.attributes):
value = self._x[idx]
elif idx >= len(self._domain.attributes):
if self._y.ndim == 0:
value = self._y
else:
value = self._y[idx - len(self.domain.attributes)]
else:
value = self._metas[-1 - idx]
var = self._domain[idx]
if isinstance(key, DiscreteVariable) and var is not key:
value = key.get_mapper_from(var)(value)
var = key
return Value(var, value)
#TODO Should we return an instance of `object` if we have a meta attribute
# that is not Discrete or Continuous? E.g. when we have strings, we'd
# like to be able to use startswith, lower etc...
# Or should we even return Continuous as floats and use Value only
# for discrete attributes?!
# Same in Table.__getitem__
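    # Editor's note (illustrative): Orange domains address metas with negative
    # indices (-1 is the first meta), so `self._metas[-1 - idx]` maps
    # idx == -1 -> _metas[0], idx == -2 -> _metas[1], and so on:
    #
    #     >>> [-1 - idx for idx in (-1, -2, -3)]
    #     [0, 1, 2]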
@staticmethod
def str_values(data, variables, limit=True):
if limit:
s = ", ".join(var.str_val(val)
for var, val in zip(variables, data[:5]))
if len(data) > 5:
s += ", ..."
return s
else:
return ", ".join(var.str_val(val)
for var, val in zip(variables, data))
def _str(self, limit):
s = "[" + self.str_values(self._x, self._domain.attributes, limit)
if self._domain.class_vars:
s += " | " + \
self.str_values(self._y, self._domain.class_vars, limit)
s += "]"
if self._domain.metas:
s += " {" + \
self.str_values(self._metas, self._domain.metas, limit) + \
"}"
return s
def __str__(self):
return self._str(False)
def __repr__(self):
return self._str(True)
def __eq__(self, other):
if not isinstance(other, Instance):
other = Instance(self._domain, other)
def same(x1, x2):
nan1 = np.isnan(x1)
nan2 = np.isnan(x2)
return np.array_equal(nan1, nan2) and \
np.array_equal(x1[~nan1], x2[~nan2])
return same(self._x, other._x) and same(self._y, other._y) \
and all(m1 == m2 or
type(m1) == type(m2) == float and isnan(m1) and isnan(m2)
for m1, m2 in zip(self._metas, other._metas))
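    # Editor's aside (illustrative): `same` implements NaN-aware equality --
    # two arrays compare equal when their missing-value masks match and the
    # observed entries match:
    #
    #     >>> import numpy as np
    #     >>> a, b = np.array([1.0, np.nan]), np.array([1.0, np.nan])
    #     >>> n1, n2 = np.isnan(a), np.isnan(b)
    #     >>> bool(np.array_equal(n1, n2) and np.array_equal(a[~n1], b[~n2]))
    #     True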
@classmethod
def __hash__(cls):
raise TypeError(f"unhashable type: '{type(cls.__name__)}'")
def __iter__(self):
return chain(iter(self._x), iter(self._y))
def values(self):
return (Value(var, val)
for var, val in zip(self.domain.variables, self))
def __len__(self):
return len(self._x) + len(self._y)
def attributes(self):
"""Return iterator over the instance's attributes"""
return iter(self._x)
def classes(self):
"""Return iterator over the instance's class attributes"""
return iter(self._y)
# A helper function for get_class and set_class
def _check_single_class(self):
if not self._domain.class_vars:
raise TypeError("Domain has no class variable")
elif len(self._domain.class_vars) > 1:
raise TypeError("Domain has multiple class variables")
def get_class(self):
"""
Return the class value as an instance of :obj:`Orange.data.Value`.
Throws an exception if there are multiple classes.
"""
self._check_single_class()
return Value(self._domain.class_var, self._y[0])
def get_classes(self):
"""
Return the class value as a list of instances of
:obj:`Orange.data.Value`.
"""
return (Value(var, value)
for var, value in zip(self._domain.class_vars, self._y))
def set_class(self, value):
"""
Set the instance's class. Throws an exception if there are multiple
classes.
"""
self._check_single_class()
if not isinstance(value, Real):
self._y[0] = self._domain.class_var.to_val(value)
else:
self._y[0] = value | null |
1,129 | import glob
import os
from datetime import datetime
import re
import codecs
import copy
from pipeline.api import PipelineAPI
from pipeline.log.logger import LocalLogger, TaskLogger, RunLogger
DTS_LAST_RECORD_FILE = 'last_record.txt'
DTS_EMAIL_TEMPLATE = 'template.html'
DTS_LOGS_FILES = os.getenv('CP_DTS_LOGS_FILES', 'dts*.*')
DATE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
dts_user = os.getenv('CP_DTS_LOG_NOTIFICATION_USER')
if not dts_user:
raise RuntimeError('CP_DTS_LOG_NOTIFICATION_USER is not defined!')
dts_users_copy = os.getenv('CP_DTS_LOG_NOTIFICATION_USERS_COPY', None)
dts_users_copy_list = [cc_user.strip() for cc_user in dts_users_copy.split(
",")] if dts_users_copy else []
dts_notify_subject = os.getenv(
'CP_DTS_LOG_NOTIFICATION_SUBJECT', 'DTS logs files has errors')
dts_path = os.getenv('CP_DTS_LOG_CHECKER_SYSTEM_DIR')
if not dts_path or not os.path.isdir(dts_path):
raise RuntimeError(
"CP_DTS_LOG_CHECKER_SYSTEM_DIR is not defined or doesn't exist on the system!")
dts_file = os.path.join(dts_path, DTS_LAST_RECORD_FILE)
dts_logs_path = os.getenv('CP_DTS_LOGS_DIR')
if not dts_logs_path or not os.path.isdir(dts_logs_path):
raise RuntimeError('CP_DTS_LOGS_DIR is not defined!')
dts_logs_files = glob.glob(os.path.join(dts_logs_path, DTS_LOGS_FILES))
dts_log_url = os.getenv('CP_DTS_LOG_URL_TEMPLATE')
dts_token = os.getenv('API_TOKEN')
dts_newest_log_file = max(glob.iglob(os.path.join(
dts_logs_path, DTS_LOGS_FILES)), key=os.path.getmtime)
email_template_file = os.path.join(dts_path, DTS_EMAIL_TEMPLATE)
pattern = os.getenv('CP_DTS_LOG_MESSAGE_PATTERN', r'/*ERROR./*')
run_id = os.getenv('RUN_ID', default='0')
pipeline_name = os.getenv('PIPELINE_NAME', default='pipeline')
runs_root = os.getenv('CP_RUNS_ROOT_DIR', default='/runs')
run_dir = os.getenv('RUN_DIR', default=os.path.join(
runs_root, pipeline_name + '-' + run_id))
log_dir = os.getenv('LOG_DIR', default=os.path.join(run_dir, 'logs'))
pipeline_api = os.getenv('API')
api = PipelineAPI(api_url=pipeline_api, log_dir=log_dir)
logger = RunLogger(api=api, run_id=run_id)
logger = TaskLogger(task='DTSLogCheck', inner=logger)
logger = LocalLogger(inner=logger)
def get_email_body(email_template_file):
body = {}
if os.path.exists(email_template_file):
with codecs.open(email_template_file, "r", "utf-8") as html_file:
body = html_file.read()
else:
body = """<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<style>
table,
td {{
border: 1px solid black;
border-collapse: collapse;
padding: 5px;
}}
</style>
</head>
<body>
<p>Dear user,<br>
*** This is a system generated email, do not reply to this email ***<br>
<b>DTS log files with errors:</b><br>
{}<br>
    Best regards,<br>
    EPM-AWS-Dev Platform</p>
</body>
</html>
"""
return body
def generate_table(files_with_errors):
def get_table_header(dts_log_url):
if dts_log_url:
return """
<tr>
<td><b>Files</b></td>
<td><b>Access link</b></td>
</tr>"""
else:
return """
<tr>
<td><b>Files</b></td>
</tr>"""
def get_table_row(dts_log_url, file):
if dts_log_url:
file_link = dts_log_url + \
os.path.basename(file) + '&contentDisposition=ATTACHMENT'
return '<td>{}</td><td><a href="{}">Link</a></td>'.format(file, file_link)
else:
return '<td>{}</td>'.format(file)
table = "<table>" + get_table_header(dts_log_url)
for file in files_with_errors:
table += "<tr>" + get_table_row(dts_log_url, file) + "</tr>"
table += "</table>"
return table
def retrieve_timestamp(text):
    match_str = re.search(
        r'(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2}).(\d{3})', text)
    if match_str is not None and match_str.group():
        return datetime.strptime(match_str.group(), DATE_TIME_FORMAT)
    else:
        logger.warn("Can't find an applicable date in string: {}".format(text))
def retrieve_last_results_info():
if os.path.exists(dts_file) and os.path.getsize(dts_file) > 0:
with open(dts_file) as f:
file_row = f.read()
dates = file_row.split(',')
script_runtime = retrieve_timestamp(dates[0])
last_row_number = dates[1]
return script_runtime, last_row_number
else:
logger.info('There is no information about previous results')
return None, None
def METHOD_NAME(script_runtime):
changed_files = []
for file in dts_logs_files:
mtime = os.path.getmtime(file)
last_modification_date = datetime.fromtimestamp(mtime)
if not script_runtime or last_modification_date > script_runtime:
changed_files.append(file)
return changed_files
def file_contains_error(filename, line_to_start):
    with open(filename, 'r') as f:
        count = 0
        for line in f:
            if count < int(line_to_start):
                count += 1
                continue
            if re.search(pattern, line):
                return True
    return False
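# Editor's aside (illustrative): with the default CP_DTS_LOG_MESSAGE_PATTERN of
# r'/*ERROR./*', re.search flags any line containing "ERROR" followed by at
# least one more character on the same line:
#
#     >>> import re
#     >>> bool(re.search(r'/*ERROR./*', '2023-01-01 ERROR: transfer failed'))
#     True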
def save_last_record_file(dts_file, dts_newest_log_file, logger):
with open(dts_newest_log_file, 'r') as f:
latest_row_number = len(f.readlines())
    with open(dts_file, 'w+') as f:
        f.write(datetime.now().strftime(DATE_TIME_FORMAT)[:-3] + ',' +
                str(latest_row_number))
logger.success("DTS file has been updated")
script_runtime, last_row_number = retrieve_last_results_info()
changed_files = METHOD_NAME(script_runtime)
files_with_errors = set()
if not changed_files:
logger.info('No new files to check')
else:
sorted_changed_files = sorted(
changed_files, key=os.path.getmtime, reverse=False)
logger.info('There are new files, a check is needed')
files_size = len(sorted_changed_files)
for index in range(0, files_size):
row_number = 0
if index == 0 and last_row_number:
row_number = int(last_row_number)
if file_contains_error(sorted_changed_files[index], row_number):
files_with_errors.add(sorted_changed_files[index])
table = generate_table(files_with_errors)
body = get_email_body(email_template_file)
if len(files_with_errors) != 0:
logger.info("There are: {} logs files with errors".format(
len(files_with_errors)))
api.create_notification(dts_notify_subject, body.format(
table), dts_user, dts_users_copy_list)
recipients = copy.deepcopy(dts_users_copy_list)
recipients.append(dts_user)
logger.info("Message was sent to {}".format(str(recipients)))
else:
logger.info("DTS logs have no errors, nothing to report")
save_last_record_file(dts_file, dts_newest_log_file, logger) | null |
1,130 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdataworks_public.endpoint import endpoint_data
class CreateTableRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'dataworks-public', '2020-05-18', 'CreateTable')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Schema(self): # String
return self.get_query_params().get('Schema')
def set_Schema(self, Schema): # String
self.add_query_param('Schema', Schema)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def METHOD_NAME(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Columnss(self): # RepeatList
return self.get_body_params().get('Columns')
def set_Columnss(self, Columns): # RepeatList
for depth1 in range(len(Columns)):
if Columns[depth1].get('SeqNumber') is not None:
self.add_body_params('Columns.' + str(depth1 + 1) + '.SeqNumber', Columns[depth1].get('SeqNumber'))
if Columns[depth1].get('IsPartitionCol') is not None:
self.add_body_params('Columns.' + str(depth1 + 1) + '.IsPartitionCol', Columns[depth1].get('IsPartitionCol'))
if Columns[depth1].get('ColumnNameCn') is not None:
self.add_body_params('Columns.' + str(depth1 + 1) + '.ColumnNameCn', Columns[depth1].get('ColumnNameCn'))
if Columns[depth1].get('Length') is not None:
self.add_body_params('Columns.' + str(depth1 + 1) + '.Length', Columns[depth1].get('Length'))
if Columns[depth1].get('Comment') is not None:
self.add_body_params('Columns.' + str(depth1 + 1) + '.Comment', Columns[depth1].get('Comment'))
if Columns[depth1].get('ColumnName') is not None:
self.add_body_params('Columns.' + str(depth1 + 1) + '.ColumnName', Columns[depth1].get('ColumnName'))
if Columns[depth1].get('ColumnType') is not None:
self.add_body_params('Columns.' + str(depth1 + 1) + '.ColumnType', Columns[depth1].get('ColumnType'))
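    # Editor's note (illustrative): the RepeatList setter above flattens a list
    # of dicts into 1-based, dotted body parameters. Hypothetical input:
    #
    #     req.set_Columnss([{"ColumnName": "id", "ColumnType": "bigint"}])
    #     # adds body params:
    #     #   Columns.1.ColumnName = "id"
    #     #   Columns.1.ColumnType = "bigint"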
def get_LifeCycle(self): # Integer
return self.get_query_params().get('LifeCycle')
def set_LifeCycle(self, LifeCycle): # Integer
self.add_query_param('LifeCycle', LifeCycle)
def get_Themess(self): # RepeatList
return self.get_body_params().get('Themes')
def set_Themess(self, Themes): # RepeatList
for depth1 in range(len(Themes)):
if Themes[depth1].get('ThemeLevel') is not None:
self.add_body_params('Themes.' + str(depth1 + 1) + '.ThemeLevel', Themes[depth1].get('ThemeLevel'))
if Themes[depth1].get('ThemeId') is not None:
self.add_body_params('Themes.' + str(depth1 + 1) + '.ThemeId', Themes[depth1].get('ThemeId'))
def get_LogicalLevelId(self): # Long
return self.get_query_params().get('LogicalLevelId')
def set_LogicalLevelId(self, LogicalLevelId): # Long
self.add_query_param('LogicalLevelId', LogicalLevelId)
def get_Endpoint(self): # String
return self.get_body_params().get('Endpoint')
def set_Endpoint(self, Endpoint): # String
self.add_body_params('Endpoint', Endpoint)
def get_EnvType(self): # Integer
return self.get_body_params().get('EnvType')
def set_EnvType(self, EnvType): # Integer
self.add_body_params('EnvType', EnvType)
def get_HasPart(self): # Integer
return self.get_query_params().get('HasPart')
def set_HasPart(self, HasPart): # Integer
self.add_query_param('HasPart', HasPart)
def get_TableName(self): # String
return self.get_query_params().get('TableName')
def set_TableName(self, TableName): # String
self.add_query_param('TableName', TableName)
def get_AppGuid(self): # String
return self.get_query_params().get('AppGuid')
def set_AppGuid(self, AppGuid): # String
self.add_query_param('AppGuid', AppGuid)
def get_ProjectId(self): # Long
return self.get_query_params().get('ProjectId')
def set_ProjectId(self, ProjectId): # Long
self.add_query_param('ProjectId', ProjectId)
def get_CategoryId(self): # Long
return self.get_query_params().get('CategoryId')
def set_CategoryId(self, CategoryId): # Long
self.add_query_param('CategoryId', CategoryId)
def get_Visibility(self): # Integer
return self.get_query_params().get('Visibility')
def set_Visibility(self, Visibility): # Integer
self.add_query_param('Visibility', Visibility)
def get_PhysicsLevelId(self): # Long
return self.get_query_params().get('PhysicsLevelId')
def set_PhysicsLevelId(self, PhysicsLevelId): # Long
self.add_query_param('PhysicsLevelId', PhysicsLevelId)
def get_OwnerId(self): # String
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # String
self.add_query_param('OwnerId', OwnerId)
def get_IsView(self): # Integer
return self.get_query_params().get('IsView')
def set_IsView(self, IsView): # Integer
self.add_query_param('IsView', IsView)
def get_ExternalTableType(self): # String
return self.get_query_params().get('ExternalTableType')
def set_ExternalTableType(self, ExternalTableType): # String
self.add_query_param('ExternalTableType', ExternalTableType)
def get_Location(self): # String
return self.get_query_params().get('Location')
def set_Location(self, Location): # String
self.add_query_param('Location', Location)
def get_Comment(self): # String
return self.get_query_params().get('Comment')
def set_Comment(self, Comment): # String
self.add_query_param('Comment', Comment) | null |
1,131 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CreateImportJobRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'hcs-mgw', '2017-10-24', 'CreateImportJob')
self.set_method('POST')
def get_IsCustomizedInstance(self):
return self.get_query_params().get('IsCustomizedInstance')
def set_IsCustomizedInstance(self,IsCustomizedInstance):
self.add_query_param('IsCustomizedInstance',IsCustomizedInstance)
def get_DestAddressRegionId(self):
return self.get_query_params().get('DestAddressRegionId')
def set_DestAddressRegionId(self,DestAddressRegionId):
self.add_query_param('DestAddressRegionId',DestAddressRegionId)
def get_DestBucket(self):
return self.get_query_params().get('DestBucket')
def set_DestBucket(self,DestBucket):
self.add_query_param('DestBucket',DestBucket)
def get_DestPrefix(self):
return self.get_query_params().get('DestPrefix')
def set_DestPrefix(self,DestPrefix):
self.add_query_param('DestPrefix',DestPrefix)
def get_TotalObjectNum(self):
return self.get_query_params().get('TotalObjectNum')
def set_TotalObjectNum(self,TotalObjectNum):
self.add_query_param('TotalObjectNum',TotalObjectNum)
def get_CustomizedInstances(self):
return self.get_query_params().get('CustomizedInstances')
def set_CustomizedInstances(self,CustomizedInstances):
self.add_query_param('CustomizedInstances',CustomizedInstances)
def get_IncrementalInterval(self):
return self.get_query_params().get('IncrementalInterval')
def set_IncrementalInterval(self,IncrementalInterval):
self.add_query_param('IncrementalInterval',IncrementalInterval)
def get_NetFlowLimiter(self):
return self.get_query_params().get('NetFlowLimiter')
def METHOD_NAME(self,NetFlowLimiter):
self.add_query_param('NetFlowLimiter',NetFlowLimiter)
def get_SrcListFilePath(self):
return self.get_query_params().get('SrcListFilePath')
def set_SrcListFilePath(self,SrcListFilePath):
self.add_query_param('SrcListFilePath',SrcListFilePath)
def get_DestAccessKeyId(self):
return self.get_query_params().get('DestAccessKeyId')
def set_DestAccessKeyId(self,DestAccessKeyId):
self.add_query_param('DestAccessKeyId',DestAccessKeyId)
def get_SrcSubAddress(self):
return self.get_query_params().get('SrcSubAddress')
def set_SrcSubAddress(self,SrcSubAddress):
self.add_query_param('SrcSubAddress',SrcSubAddress)
def get_TotalObjectSize(self):
return self.get_query_params().get('TotalObjectSize')
def set_TotalObjectSize(self,TotalObjectSize):
self.add_query_param('TotalObjectSize',TotalObjectSize)
def get_SrcDomain(self):
return self.get_query_params().get('SrcDomain')
def set_SrcDomain(self,SrcDomain):
self.add_query_param('SrcDomain',SrcDomain)
def get_SrcAddressRegionId(self):
return self.get_query_params().get('SrcAddressRegionId')
def set_SrcAddressRegionId(self,SrcAddressRegionId):
self.add_query_param('SrcAddressRegionId',SrcAddressRegionId)
def get_IncrementalMode(self):
return self.get_query_params().get('IncrementalMode')
def set_IncrementalMode(self,IncrementalMode):
self.add_query_param('IncrementalMode',IncrementalMode)
def get_SrcAddressType(self):
return self.get_query_params().get('SrcAddressType')
def set_SrcAddressType(self,SrcAddressType):
self.add_query_param('SrcAddressType',SrcAddressType)
def get_DestAccessKeySecret(self):
return self.get_query_params().get('DestAccessKeySecret')
def set_DestAccessKeySecret(self,DestAccessKeySecret):
self.add_query_param('DestAccessKeySecret',DestAccessKeySecret)
def get_SrcAppid(self):
return self.get_query_params().get('SrcAppid')
def set_SrcAppid(self,SrcAppid):
self.add_query_param('SrcAppid',SrcAppid)
def get_ExpectedImportTime(self):
return self.get_query_params().get('ExpectedImportTime')
def set_ExpectedImportTime(self,ExpectedImportTime):
self.add_query_param('ExpectedImportTime',ExpectedImportTime)
def get_SrcAddress(self):
return self.get_query_params().get('SrcAddress')
def set_SrcAddress(self,SrcAddress):
self.add_query_param('SrcAddress',SrcAddress)
def get_SrcAccessKeyId(self):
return self.get_query_params().get('SrcAccessKeyId')
def set_SrcAccessKeyId(self,SrcAccessKeyId):
self.add_query_param('SrcAccessKeyId',SrcAccessKeyId)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
def get_IncrementalRepeatCount(self):
return self.get_query_params().get('IncrementalRepeatCount')
def set_IncrementalRepeatCount(self,IncrementalRepeatCount):
self.add_query_param('IncrementalRepeatCount',IncrementalRepeatCount)
def get_SrcAccessKeySecret(self):
return self.get_query_params().get('SrcAccessKeySecret')
def set_SrcAccessKeySecret(self,SrcAccessKeySecret):
		self.add_query_param('SrcAccessKeySecret',SrcAccessKeySecret)
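# Editor's usage sketch (illustrative, not part of the SDK file; credentials
# and region are placeholders):
#
#     from aliyunsdkcore.client import AcsClient
#     client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#     request = CreateImportJobRequest()
#     request.set_Name('demo-import')
#     request.set_DestBucket('my-bucket')
#     response = client.do_action_with_exception(request)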
1,132 | import re
import pytest
import falcon
import falcon.testing as testing
from _util import create_app, disable_asgi_non_coroutine_wrapping # NOQA
class Proxy:
def forward(self, req):
return falcon.HTTP_503
class Sink:
def __init__(self):
self._proxy = Proxy()
def __call__(self, req, resp, **kwargs):
resp.status = self._proxy.forward(req)
self.kwargs = kwargs
class SinkAsync(Sink):
async def __call__(self, req, resp, **kwargs):
super().__call__(req, resp, **kwargs)
def kitchen_sink(req, resp, **kwargs):
resp.set_header('X-Missing-Feature', 'kitchen-sink')
async def METHOD_NAME(req, resp, **kwargs):
kitchen_sink(req, resp, **kwargs)
class BookCollection(testing.SimpleTestResource):
pass
@pytest.fixture
def resource():
return BookCollection()
@pytest.fixture
def sink(asgi):
return SinkAsync() if asgi else Sink()
@pytest.fixture
def client(asgi):
app = create_app(asgi)
return testing.TestClient(app)
class TestDefaultRouting:
def test_single_default_pattern(self, client, sink, resource):
client.app.add_sink(sink)
response = client.simulate_request(path='/')
assert response.status == falcon.HTTP_503
def test_single_simple_pattern(self, client, sink, resource):
client.app.add_sink(sink, r'/foo')
response = client.simulate_request(path='/foo/bar')
assert response.status == falcon.HTTP_503
def test_single_compiled_pattern(self, client, sink, resource):
client.app.add_sink(sink, re.compile(r'/foo'))
response = client.simulate_request(path='/foo/bar')
assert response.status == falcon.HTTP_503
response = client.simulate_request(path='/auth')
assert response.status == falcon.HTTP_404
def test_named_groups(self, client, sink, resource):
client.app.add_sink(sink, r'/user/(?P<id>\d+)')
response = client.simulate_request(path='/user/309')
assert response.status == falcon.HTTP_503
assert sink.kwargs['id'] == '309'
response = client.simulate_request(path='/user/sally')
assert response.status == falcon.HTTP_404
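    # Editor's aside (illustrative): named groups in the sink's regex surface
    # as keyword arguments on the sink callable, analogous to URI template
    # fields on responders:
    #
    #     >>> import re
    #     >>> re.match(r'/user/(?P<id>\d+)', '/user/309').groupdict()
    #     {'id': '309'}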
def test_multiple_patterns(self, asgi, client, sink, resource):
if asgi:
async def sink_too(req, resp):
resp.status = falcon.HTTP_781
else:
def sink_too(req, resp):
resp.status = falcon.HTTP_781
client.app.add_sink(sink, r'/foo')
client.app.add_sink(sink_too, r'/foo') # Last duplicate wins
client.app.add_sink(sink, r'/katza')
response = client.simulate_request(path='/foo/bar')
assert response.status == falcon.HTTP_781
response = client.simulate_request(path='/katza')
assert response.status == falcon.HTTP_503
def test_with_route(self, client, sink, resource):
client.app.add_route('/books', resource)
client.app.add_sink(sink, '/proxy')
response = client.simulate_request(path='/proxy/books')
assert not resource.called
assert response.status == falcon.HTTP_503
response = client.simulate_request(path='/books')
assert resource.called
assert response.status == falcon.HTTP_200
def test_route_precedence(self, client, sink, resource):
# NOTE(kgriffs): In case of collision, the route takes precedence.
client.app.add_route('/books', resource)
client.app.add_sink(sink, '/books')
response = client.simulate_request(path='/books')
assert resource.called
assert response.status == falcon.HTTP_200
def test_route_precedence_with_id(self, client, sink, resource):
# NOTE(kgriffs): In case of collision, the route takes precedence.
client.app.add_route('/books/{id}', resource)
client.app.add_sink(sink, '/books')
response = client.simulate_request(path='/books')
assert not resource.called
assert response.status == falcon.HTTP_503
def test_route_precedence_with_both_id(self, client, sink, resource):
# NOTE(kgriffs): In case of collision, the route takes precedence.
client.app.add_route('/books/{id}', resource)
client.app.add_sink(sink, r'/books/\d+')
response = client.simulate_request(path='/books/123')
assert resource.called
assert response.status == falcon.HTTP_200
class TestSinkMethodCompatibility:
def _verify_kitchen_sink(self, client):
resp = client.simulate_request('BREW', '/features')
assert resp.status_code == 200
assert resp.headers.get('X-Missing-Feature') == 'kitchen-sink'
def test_add_async_sink(self, client, asgi):
if not asgi:
with pytest.raises(falcon.CompatibilityError):
client.app.add_sink(METHOD_NAME)
else:
client.app.add_sink(METHOD_NAME, '/features')
self._verify_kitchen_sink(client)
def test_add_sync_sink(self, client, asgi):
if asgi:
with disable_asgi_non_coroutine_wrapping():
with pytest.raises(falcon.CompatibilityError):
client.app.add_sink(kitchen_sink)
else:
client.app.add_sink(kitchen_sink, '/features')
self._verify_kitchen_sink(client)
def test_add_sync_sink_with_wrapping(self, client, asgi):
client.app.add_sink(kitchen_sink, '/features')
self._verify_kitchen_sink(client) | null |
1,133 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class ModifyInstanceNetworkSpecRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'ModifyInstanceNetworkSpec','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_ISP(self): # String
return self.get_query_params().get('ISP')
def set_ISP(self, ISP): # String
self.add_query_param('ISP', ISP)
def get_InternetMaxBandwidthOut(self): # Integer
return self.get_query_params().get('InternetMaxBandwidthOut')
def set_InternetMaxBandwidthOut(self, InternetMaxBandwidthOut): # Integer
self.add_query_param('InternetMaxBandwidthOut', InternetMaxBandwidthOut)
def get_StartTime(self): # String
return self.get_query_params().get('StartTime')
def set_StartTime(self, StartTime): # String
self.add_query_param('StartTime', StartTime)
def METHOD_NAME(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_EndTime(self): # String
return self.get_query_params().get('EndTime')
def set_EndTime(self, EndTime): # String
self.add_query_param('EndTime', EndTime)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_NetworkChargeType(self): # String
return self.get_query_params().get('NetworkChargeType')
def set_NetworkChargeType(self, NetworkChargeType): # String
self.add_query_param('NetworkChargeType', NetworkChargeType)
def get_InternetMaxBandwidthIn(self): # Integer
return self.get_query_params().get('InternetMaxBandwidthIn')
def set_InternetMaxBandwidthIn(self, InternetMaxBandwidthIn): # Integer
self.add_query_param('InternetMaxBandwidthIn', InternetMaxBandwidthIn)
def get_AllocatePublicIp(self): # Boolean
return self.get_query_params().get('AllocatePublicIp')
def set_AllocatePublicIp(self, AllocatePublicIp): # Boolean
self.add_query_param('AllocatePublicIp', AllocatePublicIp) | null |
1,134 | import random
class Colour:
BLACK = "\033[30m"
RED = "\033[91m"
GREEN = "\033[32m"
END = "\033[0m"
suits = (
Colour.RED + "Hearts" + Colour.END,
Colour.RED + "Diamonds" + Colour.END,
Colour.BLACK + "Spades" + Colour.END,
Colour.BLACK + "Clubs" + Colour.END,
)
ranks = (
"Two",
"Three",
"Four",
"Five",
"Six",
"Seven",
"Eight",
"Nine",
"Ten",
"Jack",
"Queen",
"King",
"Ace",
)
values = {
"Two": 2,
"Three": 3,
"Four": 4,
"Five": 5,
"Six": 6,
"Seven": 7,
"Eight": 8,
"Nine": 9,
"Ten": 10,
"Jack": 10,
"Queen": 10,
"King": 10,
"Ace": 11,
}
playing = True
class Card:
def __init__(self, suit, rank):
self.suit = suit
self.rank = rank
def __str__(self):
return self.rank + " of " + self.suit
class Deck:
def __init__(self):
self.deck = []
for suit in suits:
for rank in ranks:
self.deck.append(Card(suit, rank))
    def __str__(self):
        deck_comp = ""
        for card in self.deck:
            deck_comp += "\n " + str(card)
        return deck_comp
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
single_card = self.deck.pop()
return single_card
class Hand:
def __init__(self):
self.cards = []
self.value = 0
self.aces = 0 # to keep track of aces
def add_card(self, card):
self.cards.append(card)
self.value += values[card.rank]
if card.rank == "Ace":
self.aces += 1
def adjust_for_ace(self):
while self.value > 21 and self.aces:
self.value -= 10
self.aces -= 1
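        # Worked example (illustrative): Ace + Ace + Nine is first counted as
        # 11 + 11 + 9 = 31; one pass of the loop demotes an Ace to 1 (value 21,
        # aces 1) and the condition fails, leaving a valid hand of 21.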
class Chips:
def __init__(self):
self.total = 100
self.bet = 0
    def win_bet(self):
self.total += self.bet
def lose_bet(self):
self.total -= self.bet
def take_bet(chips):
while True:
try:
chips.bet = int(input("How many chips would you like to bet? "))
except ValueError:
print("Your bet must be an integer! Try again.")
else:
if chips.bet > chips.total or chips.bet <= 0:
                print(
                    "Your bet must be positive and cannot exceed your balance! Your current balance is:",
                    chips.total,
                )
else:
break
def hit(deck, hand):
hand.add_card(deck.deal())
hand.adjust_for_ace()
def hit_or_stand(deck, hand):
global playing
while True:
x = input("Would you like to Hit or Stand? Enter '1' or '0' ")
        if x == "1":
            hit(deck, hand)
        elif x == "0":
print("You chose to stand. Dealer will hit.")
playing = False
else:
print("Wrong input, please try again.")
continue
break
def show_some(player, dealer):
print("\nDealer's Hand:")
print(" { hidden card }")
print("", dealer.cards[1])
print("\nYour Hand:", *player.cards, sep="\n ")
def show_all(player, dealer):
print("\nDealer's Hand:", *dealer.cards, sep="\n ")
print("Dealer's Hand =", dealer.value)
print("\nYour Hand:", *player.cards, sep="\n ")
print("Your Hand =", player.value)
def player_busts(player, dealer, chips):
    print("You are BUSTED!")
    chips.lose_bet()
def player_wins(player, dealer, chips):
    print("You are the winner!")
    chips.win_bet()
def dealer_busts(player, dealer, chips):
    print("Dealer has BUSTED!")
    chips.win_bet()
def dealer_wins(player, dealer, chips):
    print("Dealer is the winner!")
    chips.lose_bet()
def push(player, dealer):
    print("The match is a tie!")
# GAMEPLAY
player_chips = Chips()
while True:
print("\t **********************************************************")
print(
"\t Welcome to the game Casino - BLACK JACK ! "
)
print("\t **********************************************************")
print(Colour.BLACK + "\t ***************")
print("\t * A *")
print("\t * *")
print("\t * * *")
print("\t * *** *")
print("\t * ***** *")
print("\t * *** *")
print("\t * * *")
print("\t * *")
print("\t * *")
print("\t ***************" + Colour.END)
    print(
        "\nRULES: Get as close to 21 as you can, but if you go over 21 you lose!\n Aces count as 1 or 11."
    )
deck = Deck()
deck.shuffle()
player_hand = Hand()
player_hand.add_card(deck.deal())
player_hand.add_card(deck.deal())
dealer_hand = Hand()
dealer_hand.add_card(deck.deal())
dealer_hand.add_card(deck.deal())
take_bet(player_chips)
show_some(player_hand, dealer_hand)
while playing:
hit_or_stand(deck, player_hand)
show_some(player_hand, dealer_hand)
if player_hand.value > 21:
player_busts(player_hand, dealer_hand, player_chips)
break
if player_hand.value <= 21:
while dealer_hand.value < 17:
hit(deck, dealer_hand)
show_all(player_hand, dealer_hand)
if dealer_hand.value > 21:
dealer_busts(player_hand, dealer_hand, player_chips)
elif dealer_hand.value > player_hand.value:
dealer_wins(player_hand, dealer_hand, player_chips)
elif dealer_hand.value < player_hand.value:
player_wins(player_hand, dealer_hand, player_chips)
else:
push(player_hand, dealer_hand)
print("\nYour current balance stands at", player_chips.total)
if player_chips.total > 0:
new_game = input("Would you like to play another hand? Enter '1' or '0' ")
if new_game.lower() == "1":
playing = True
continue
else:
print(
"Thanks for playing!\n"
+ Colour.GREEN
+ "\t$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$\n \t Congratulations! You won "
+ str(player_chips.total)
+ " coins!\n\t$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$\n "
+ Colour.END
)
break
else:
        print(
            "Oops! You have run out of chips and can't play any more.\nThanks for playing! Do come again to Casino BLACK JACK!"
        )
break | null |
1,135 | from __future__ import annotations
import asyncio
import traceback
from typing import List, Tuple
import aiotools
import pytest
from redis.asyncio import Redis
from redis.exceptions import ConnectionError as RedisConnectionError
from redis.exceptions import TimeoutError as RedisTimeoutError
from ai.backend.common import redis_helper
from ai.backend.common.types import HostPortPair, RedisConnectionInfo
from .docker import DockerRedisNode
from .utils import interrupt
@pytest.mark.redis
@pytest.mark.asyncio
@pytest.mark.xfail
@pytest.mark.parametrize("disruption_method", ["stop", "pause"])
async def test_blist(redis_container: tuple[str, HostPortPair], disruption_method: str) -> None:
do_pause = asyncio.Event()
paused = asyncio.Event()
do_unpause = asyncio.Event()
unpaused = asyncio.Event()
received_messages: List[str] = []
async def pop(r: RedisConnectionInfo, key: str) -> None:
try:
async with aiotools.aclosing(
redis_helper.blpop(r, key, reconnect_poll_interval=0.2),
) as agen:
async for raw_msg in agen:
msg = raw_msg.decode()
received_messages.append(msg)
except asyncio.CancelledError:
pass
except Exception:
traceback.print_exc()
addr = redis_container[1]
r = RedisConnectionInfo(
Redis.from_url(url=f"redis://{addr.host}:{addr.port}", socket_timeout=0.2),
service_name=None,
)
assert isinstance(r.client, Redis)
await r.client.delete("bl1")
pop_task = asyncio.create_task(pop(r, "bl1"))
interrupt_task = asyncio.create_task(
interrupt(
disruption_method,
DockerRedisNode("node", addr.port, redis_container[0]),
do_pause=do_pause,
do_unpause=do_unpause,
paused=paused,
unpaused=unpaused,
)
)
await asyncio.sleep(0)
for i in range(2):
print(f"pushing {i} to bl1")
await r.client.rpush("bl1", str(i))
await asyncio.sleep(0.1)
do_pause.set()
await paused.wait()
for i in range(2):
# The Redis server is dead temporarily...
if disruption_method == "stop":
with pytest.raises(RedisConnectionError):
await r.client.rpush("bl1", str(2 + i))
elif disruption_method == "pause":
with pytest.raises((asyncio.TimeoutError, RedisTimeoutError)):
await r.client.rpush("bl1", str(2 + i))
else:
raise RuntimeError("should not reach here")
await asyncio.sleep(0.1)
do_unpause.set()
await unpaused.wait()
for i in range(2):
await r.client.rpush("bl1", str(4 + i))
await asyncio.sleep(0.1)
await interrupt_task
pop_task.cancel()
await pop_task
assert pop_task.done()
all_messages = set(map(int, received_messages))
assert set(range(0, 2)) < all_messages
assert set(range(5, 6)) < all_messages # more msgs may be lost during restart
assert all_messages <= set(range(0, 6))
@pytest.mark.redis
@pytest.mark.asyncio
@pytest.mark.xfail
@pytest.mark.parametrize("disruption_method", ["stop", "pause"])
async def test_blist_with_retrying_rpush(
redis_container: Tuple[str, HostPortPair], disruption_method: str
) -> None:
do_pause = asyncio.Event()
paused = asyncio.Event()
do_unpause = asyncio.Event()
unpaused = asyncio.Event()
received_messages: List[str] = []
async def pop(r: RedisConnectionInfo, key: str) -> None:
try:
async with aiotools.aclosing(
redis_helper.blpop(r, key, reconnect_poll_interval=0.2),
) as agen:
async for raw_msg in agen:
msg = raw_msg.decode()
received_messages.append(msg)
except asyncio.CancelledError:
pass
addr = redis_container[1]
r = RedisConnectionInfo(
Redis.from_url(url=f"redis://{addr.host}:{addr.port}", socket_timeout=0.2),
service_name=None,
)
assert isinstance(r.client, Redis)
await r.client.delete("bl1")
pop_task = asyncio.create_task(pop(r, "bl1"))
interrupt_task = asyncio.create_task(
interrupt(
disruption_method,
DockerRedisNode("node", addr.port, redis_container[0]),
do_pause=do_pause,
do_unpause=do_unpause,
paused=paused,
unpaused=unpaused,
)
)
await asyncio.sleep(0)
for i in range(2):
await redis_helper.execute(r, lambda r: r.rpush("bl1", str(i)))
await asyncio.sleep(0.1)
do_pause.set()
await paused.wait()
async def wakeup():
await asyncio.sleep(2.0)
do_unpause.set()
wakeup_task = asyncio.create_task(wakeup())
for i in range(2):
await redis_helper.execute(r, lambda r: r.rpush("bl1", str(2 + i)))
await asyncio.sleep(0.1)
await wakeup_task
await unpaused.wait()
for i in range(2):
await redis_helper.execute(r, lambda r: r.rpush("bl1", str(4 + i)))
await asyncio.sleep(0.1)
await interrupt_task
pop_task.cancel()
await pop_task
assert pop_task.done()
all_messages = set(map(int, received_messages))
assert set(range(0, 2)) < all_messages
assert set(range(5, 6)) < all_messages # more msgs may be lost during restart
assert all_messages <= set(range(0, 6)) | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
class EndpointData():
def __init__(self):
self.endpoint_map = {
"cn-shanghai-internal-test-1": "business.aliyuncs.com",
"cn-shenzhen-su18-b01": "business.aliyuncs.com",
"cn-beijing": "business.aliyuncs.com",
"cn-shanghai-inner": "business.aliyuncs.com",
"cn-hangzhou-internal-prod-1": "business.aliyuncs.com",
"cn-north-2-gov-1": "business.aliyuncs.com",
"cn-yushanfang": "business.aliyuncs.com",
"cn-qingdao-nebula": "business.aliyuncs.com",
"cn-beijing-finance-pop": "business.aliyuncs.com",
"cn-wuhan": "business.aliyuncs.com",
"cn-zhangjiakou": "business.aliyuncs.com",
"us-west-1": "business.ap-southeast-1.aliyuncs.com",
"cn-zhangbei": "business.aliyuncs.com",
"rus-west-1-pop": "business.ap-southeast-1.aliyuncs.com",
"cn-shanghai-et15-b01": "business.aliyuncs.com",
"cn-hangzhou-bj-b01": "business.aliyuncs.com",
"cn-zhangbei-na61-b01": "business.aliyuncs.com",
"ap-northeast-2": "business.ap-southeast-1.aliyuncs.com",
"ap-northeast-1": "business.ap-southeast-1.aliyuncs.com",
"cn-huhehaote-nebula-1": "business.aliyuncs.com",
"cn-shanghai-et2-b01": "business.aliyuncs.com",
"ap-southeast-1": "business.ap-southeast-1.aliyuncs.com",
"ap-southeast-2": "business.ap-southeast-1.aliyuncs.com",
"ap-southeast-3": "business.ap-southeast-1.aliyuncs.com",
"ap-southeast-5": "business.ap-southeast-1.aliyuncs.com",
"us-east-1": "business.ap-southeast-1.aliyuncs.com",
"cn-shenzhen-inner": "business.aliyuncs.com",
"cn-zhangjiakou-na62-a01": "business.aliyuncs.com",
"cn-beijing-gov-1": "business.aliyuncs.com",
"cn-wulanchabu": "business.aliyuncs.com",
"ap-south-1": "business.ap-southeast-1.aliyuncs.com",
"cn-shenzhen-st4-d01": "business.aliyuncs.com",
"cn-haidian-cm12-c01": "business.aliyuncs.com",
"cn-qingdao": "business.aliyuncs.com",
"cn-hongkong-finance-pop": "business.aliyuncs.com",
"cn-shanghai": "business.aliyuncs.com",
"cn-shanghai-finance-1": "business.aliyuncs.com",
"cn-hongkong": "business.aliyuncs.com",
"eu-central-1": "business.ap-southeast-1.aliyuncs.com",
"cn-shenzhen": "business.aliyuncs.com",
"cn-zhengzhou-nebula-1": "business.aliyuncs.com",
"eu-west-1": "business.ap-southeast-1.aliyuncs.com",
"cn-hangzhou-internal-test-1": "business.aliyuncs.com",
"eu-west-1-oxs": "business.ap-southeast-1.aliyuncs.com",
"cn-beijing-finance-1": "business.aliyuncs.com",
"cn-hangzhou-internal-test-3": "business.aliyuncs.com",
"cn-hangzhou-internal-test-2": "business.aliyuncs.com",
"cn-shenzhen-finance-1": "business.aliyuncs.com",
"me-east-1": "business.ap-southeast-1.aliyuncs.com",
"cn-chengdu": "business.aliyuncs.com",
"cn-hangzhou-test-306": "business.aliyuncs.com",
"cn-hangzhou-finance": "business.aliyuncs.com",
"cn-beijing-nu16-b01": "business.aliyuncs.com",
"cn-edge-1": "business.aliyuncs.com",
"cn-huhehaote": "business.aliyuncs.com",
"cn-fujian": "business.aliyuncs.com",
"ap-northeast-2-pop": "business.ap-southeast-1.aliyuncs.com",
"cn-hangzhou": "business.aliyuncs.com",
}
self.endpoint_regional = "regional"
def getEndpointMap(self):
return self.endpoint_map
    def getEndpointRegional(self):
return self.endpoint_regional
endpoint_data = EndpointData() | null |
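# Illustrative check, assuming the module is run directly rather than imported:
if __name__ == "__main__":
    # Region IDs resolve through the map above; lookups for unknown regions
    # fall back to None via dict.get().
    print(endpoint_data.getEndpointMap().get("cn-hangzhou"))  # business.aliyuncs.com
    print(endpoint_data.getEndpointRegional())                # regional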
1,137 | import os
import pytest
from conftest import SATELLITE_PKG_DST, SATELLITE_PKG_URL, SYSTEM_RELEASE_ENV
from envparse import env
@pytest.fixture(scope="function")
def custom_subman(shell):
    """Install subscription-manager from a custom repository and clean it up afterwards."""
    repository = None
# Setup repositories to install the subscription-manager from.
epel7_repository = "ubi"
epel8_repository = "baseos"
if SYSTEM_RELEASE_ENV in ("oracle-7", "centos-7"):
repository = epel7_repository
elif "oracle-8" in SYSTEM_RELEASE_ENV or "centos-8" in SYSTEM_RELEASE_ENV:
repository = epel8_repository
# On Oracle Linux 7 a "rhn-client-tools" package may be present on
# the system which prevents "subscription-manager" to be installed.
# Remove package rhn-client-tools from Oracle Linux 7.
if "oracle-7" in SYSTEM_RELEASE_ENV:
assert shell("yum remove -y rhn-client-tools")
assert shell(f"cp files/{repository}.repo /etc/yum.repos.d/")
# Install subscription-manager from 'custom' repositories, disable others for the transaction.
assert shell(f"yum -y --disablerepo=* --enablerepo={repository} install subscription-manager").returncode == 0
yield
# Remove custom subscription-manager
assert shell(f"yum remove -y --disablerepo=* --enablerepo={repository} subscription-manager*").returncode == 0
assert shell(f"rm -f /etc/yum.repos.d/{repository}.repo").returncode == 0
# Install back previously removed client tools
if "oracle-7" in SYSTEM_RELEASE_ENV:
shell("yum install -y rhn-client-tools")
shell("yum remove -y python-syspurpose")
# The termination of the conversion does not happen fast enough, so same packages can get removed
# Install the package back to avoid leaving the system in tainted state
elif "centos-8" in SYSTEM_RELEASE_ENV:
shell("yum install -y centos-gpg-keys centos-logos")
elif "oracle-8" in SYSTEM_RELEASE_ENV:
shell("yum install -y oraclelinux-release-el8-* oraclelinux-release-8* redhat-release-8*")
# Some packages might get downgraded during the setup; update just to be sure the system is fine
shell("yum update -y")
@pytest.fixture(scope="function")
def katello_package(shell):
    """Download and install the katello package for Satellite, removing it afterwards."""
# OL distros may not have wget installed
assert shell("yum install wget -y").returncode == 0
# Install katello package for satellite
assert (
shell(
"wget --no-check-certificate --output-document {} {}".format(SATELLITE_PKG_DST, SATELLITE_PKG_URL)
).returncode
== 0
)
assert shell("rpm -i {}".format(SATELLITE_PKG_DST)).returncode == 0
yield
# Remove the katello packages
assert shell("yum remove -y katello-*").returncode == 0
assert shell(f"rm -f {SATELLITE_PKG_DST}").returncode == 0
@pytest.fixture(scope="function")
def kernel_check_envar(shell):
"""
Fixture.
Set CONVERT2RHEL_UNSUPPORTED_SKIP_KERNEL_CURRENCY_CHECK environment variable
to skip the kernel currency check.
"""
# Since we are moving all repos away, we need to bypass kernel check
os.environ["CONVERT2RHEL_UNSUPPORTED_SKIP_KERNEL_CURRENCY_CHECK"] = "1"
yield
# Remove the envar skipping the kernel check
del os.environ["CONVERT2RHEL_UNSUPPORTED_SKIP_KERNEL_CURRENCY_CHECK"]
@pytest.mark.test_unsuccessful_satellite_registration
def test_backup_os_release_wrong_registration(shell, convert2rhel, custom_subman):
"""
Verify that the os-release file is restored when the satellite registration fails.
Reference issue: RHELC-51
"""
assert shell("find /etc/os-release").returncode == 0
with convert2rhel("-y --no-rpm-va -k wrong_key -o rUbBiSh_pWd --debug --keep-rhsm") as c2r:
c2r.expect("Unable to register the system through subscription-manager.")
c2r.expect("Restore /etc/os-release from backup")
assert shell("find /etc/os-release").returncode == 0
@pytest.fixture(scope="function")
def system_release_missing(shell):
# Make backup copy of the file
backup_folder = "/tmp/missing-system-release_sysrelease_backup/"
assert shell(f"mkdir {backup_folder}").returncode == 0
assert shell(f"mv /etc/system-release {backup_folder}").returncode == 0
yield
# Restore the system
assert shell(f"mv -v {backup_folder}system-release /etc/").returncode == 0
assert shell(f"rm -rf {backup_folder}").returncode == 0
@pytest.mark.test_missing_system_release
def test_missing_system_release(shell, convert2rhel, system_release_missing):
"""
It is required to have /etc/system-release file present on the system.
If the file is missing inhibit the conversion.
"""
with convert2rhel(
"-y --no-rpm-va -k {} -o {} --debug".format(
env.str("SATELLITE_KEY"),
env.str("SATELLITE_ORG"),
)
) as c2r:
c2r.expect("Unable to find the /etc/system-release file containing the OS name and version")
assert c2r.exitstatus != 0
@pytest.mark.test_backup_os_release_no_envar
def test_backup_os_release_no_envar(
    shell, convert2rhel, custom_subman, katello_package, repositories, kernel_check_envar
):
"""
    This test case removes all the repos on the system, which prevents the backup of some files.
    Satellite is used in all test cases.
    In this scenario the `CONVERT2RHEL_UNSUPPORTED_INCOMPLETE_ROLLBACK` environment variable is not set,
    so the conversion is inhibited at an early stage.
"""
assert shell("find /etc/os-release").returncode == 0
with convert2rhel(
"-y --no-rpm-va -k {} -o {} --debug --keep-rhsm".format(
env.str("SATELLITE_KEY"),
env.str("SATELLITE_ORG"),
),
unregister=True,
) as c2r:
c2r.expect("set the environment variable 'CONVERT2RHEL_UNSUPPORTED_INCOMPLETE_ROLLBACK.")
assert c2r.exitstatus != 0
assert shell("find /etc/os-release").returncode == 0
@pytest.fixture(scope="function")
def unsupported_rollback_envar(shell):
os.environ["CONVERT2RHEL_UNSUPPORTED_INCOMPLETE_ROLLBACK"] = "1"
yield
del os.environ["CONVERT2RHEL_UNSUPPORTED_INCOMPLETE_ROLLBACK"]
@pytest.mark.test_backup_os_release_with_envar
def test_backup_os_release_with_envar(
    shell, convert2rhel, custom_subman, katello_package, repositories, unsupported_rollback_envar, kernel_check_envar
):
"""
In this scenario the variable `CONVERT2RHEL_UNSUPPORTED_INCOMPLETE_ROLLBACK` is set.
This test case removes all the repos on the system and validates that
the /etc/os-release package is being backed up and restored during rollback.
Ref ticket: OAMG-5457. Note that after the test, the $releasever
variable is unset.
"""
assert shell("find /etc/os-release").returncode == 0
with convert2rhel(
"-y --no-rpm-va -k {} -o {} --debug --keep-rhsm".format(
env.str("SATELLITE_KEY"),
env.str("SATELLITE_ORG"),
),
unregister=True,
) as c2r:
c2r.expect(
"'CONVERT2RHEL_UNSUPPORTED_INCOMPLETE_ROLLBACK' environment variable detected, continuing conversion."
)
c2r.sendcontrol("c")
assert c2r.exitstatus != 0
assert shell("find /etc/os-release").returncode == 0 | null |
1,138 | # Copyright 2017-2020 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import pytest
from ..common_utils.entity_managers import PipelineManager
from ..common_utils.test_utils import format_name
from ..utils.pipeline_utils import *
MAX_REP_COUNT = 150
class TestTerminateInstanceBeforeKubeRegistration(object):
pipeline_id = None
run_id = None
state = FailureIndicator()
test_case = "TC-SCALING-7"
@classmethod
def setup_class(cls):
logging.basicConfig(filename=get_log_filename(), level=logging.INFO,
format='%(levelname)s %(asctime)s %(module)s:%(message)s')
pipeline_name = format_name("test_terminate_instance_before_registration")
cls.pipeline_id = PipelineManager.create(pipeline_name)
logging.info("Pipeline {} with ID {} created.".format(pipeline_name, cls.pipeline_id))
try:
run_id = run_pipe(pipeline_name, "-id", "11")[0]
cls.run_id = run_id
logging.info("Pipeline run with ID {}.".format(cls.run_id))
wait_for_required_status("SCHEDULED", run_id, MAX_REP_COUNT)
wait_for_instance_creation(run_id, MAX_REP_COUNT)
logging.info("Instance {} created.".format(run_id))
terminate_instance(run_id)
wait_for_instance_termination(run_id, MAX_REP_COUNT)
logging.info("Instance {} terminated.".format(run_id))
except BaseException as e:
logging.error(e.message)
cls.teardown_class()
raise RuntimeError(e.message)
@classmethod
def teardown_class(cls):
node_name = get_node_name(cls.run_id)
terminate_node(node_name)
logging.info("Node {} was terminated".format(node_name))
if not cls.state.failure:
PipelineManager.delete(cls.pipeline_id)
logging.info("Pipeline {} deleted".format(cls.pipeline_id))
        wait_for_instance_termination(cls.run_id, MAX_REP_COUNT)
@pytest.mark.run(order=2)
    def test_pipeline_should_wait_for_node_registration(self):
try:
status = get_pipe_status(self.run_id)
if status != "SUCCESS":
status = wait_for_required_status("RUNNING", self.run_id, 400, validation=False)
assert status == "RUNNING" or status == "SUCCESS", \
"Pipeline should wait for node registration. Current status: {}".format(status)
except AssertionError as e:
logging.info("Case {} failed!".format(self.test_case))
self.state.failure = True
pytest.fail("Test case {} failed.\n{}".format(self.test_case, e.message))
@pytest.mark.run(order=1)
def test_new_node_should_be_created(self):
try:
wait_for_node_up(self.run_id, 400, validation=False)
node_name = get_node_name(self.run_id)
logging.info("Node {} in use.".format(node_name))
assert len(get_cluster_state_for_run_id(self.run_id)) == 1, "Cluster should have exact one extra node."
except AssertionError as e:
logging.info("Case {} failed!".format(self.test_case))
self.state.failure = True
pytest.fail("Test case {} failed.\n{}".format(self.test_case, e.message))
@pytest.mark.run(order=3)
def test_cluster_should_not_have_node_without_label(self):
try:
node_name = get_node_name(self.run_id)
assert len(get_nodes_without_labels(node_name)) == 0, "Cluster should not have nodes without labels."
except AssertionError as e:
logging.info("Case {} failed!".format(self.test_case))
self.state.failure = True
pytest.fail("Test case {} failed.\n{}".format(self.test_case, e.message)) | null |
1,139 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdyvmsapi.endpoint import endpoint_data
class SmartCallRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dyvmsapi', '2017-05-25', 'SmartCall','dyvms')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_VoiceCodeParam(self):
return self.get_query_params().get('VoiceCodeParam')
def set_VoiceCodeParam(self,VoiceCodeParam):
self.add_query_param('VoiceCodeParam',VoiceCodeParam)
def get_EarlyMediaAsr(self):
return self.get_query_params().get('EarlyMediaAsr')
def set_EarlyMediaAsr(self,EarlyMediaAsr):
self.add_query_param('EarlyMediaAsr',EarlyMediaAsr)
def get_BackgroundSpeed(self):
return self.get_query_params().get('BackgroundSpeed')
def set_BackgroundSpeed(self,BackgroundSpeed):
self.add_query_param('BackgroundSpeed',BackgroundSpeed)
def get_BackgroundVolume(self):
return self.get_query_params().get('BackgroundVolume')
def set_BackgroundVolume(self,BackgroundVolume):
self.add_query_param('BackgroundVolume',BackgroundVolume)
def get_Speed(self):
return self.get_query_params().get('Speed')
def set_Speed(self,Speed):
self.add_query_param('Speed',Speed)
def get_AsrBaseId(self):
return self.get_query_params().get('AsrBaseId')
def set_AsrBaseId(self,AsrBaseId):
self.add_query_param('AsrBaseId',AsrBaseId)
def get_SessionTimeout(self):
return self.get_query_params().get('SessionTimeout')
def set_SessionTimeout(self,SessionTimeout):
self.add_query_param('SessionTimeout',SessionTimeout)
def get_DynamicId(self):
return self.get_query_params().get('DynamicId')
def set_DynamicId(self,DynamicId):
self.add_query_param('DynamicId',DynamicId)
def get_CalledNumber(self):
return self.get_query_params().get('CalledNumber')
def set_CalledNumber(self,CalledNumber):
self.add_query_param('CalledNumber',CalledNumber)
def get_TtsSpeed(self):
return self.get_query_params().get('TtsSpeed')
def set_TtsSpeed(self,TtsSpeed):
self.add_query_param('TtsSpeed',TtsSpeed)
def get_VoiceCode(self):
return self.get_query_params().get('VoiceCode')
def set_VoiceCode(self,VoiceCode):
self.add_query_param('VoiceCode',VoiceCode)
    def get_CalledShowNumber(self):
return self.get_query_params().get('CalledShowNumber')
def set_CalledShowNumber(self,CalledShowNumber):
self.add_query_param('CalledShowNumber',CalledShowNumber)
def get_EnableITN(self):
return self.get_query_params().get('EnableITN')
def set_EnableITN(self,EnableITN):
self.add_query_param('EnableITN',EnableITN)
def get_ActionCodeTimeBreak(self):
return self.get_query_params().get('ActionCodeTimeBreak')
def set_ActionCodeTimeBreak(self,ActionCodeTimeBreak):
self.add_query_param('ActionCodeTimeBreak',ActionCodeTimeBreak)
def get_TtsConf(self):
return self.get_query_params().get('TtsConf')
def set_TtsConf(self,TtsConf):
self.add_query_param('TtsConf',TtsConf)
def get_ActionCodeBreak(self):
return self.get_query_params().get('ActionCodeBreak')
def set_ActionCodeBreak(self,ActionCodeBreak):
self.add_query_param('ActionCodeBreak',ActionCodeBreak)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_RecordFlag(self):
return self.get_query_params().get('RecordFlag')
def set_RecordFlag(self,RecordFlag):
self.add_query_param('RecordFlag',RecordFlag)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_TtsVolume(self):
return self.get_query_params().get('TtsVolume')
def set_TtsVolume(self,TtsVolume):
self.add_query_param('TtsVolume',TtsVolume)
def get_StreamAsr(self):
return self.get_query_params().get('StreamAsr')
def set_StreamAsr(self,StreamAsr):
self.add_query_param('StreamAsr',StreamAsr)
def get_Volume(self):
return self.get_query_params().get('Volume')
def set_Volume(self,Volume):
self.add_query_param('Volume',Volume)
def get_MuteTime(self):
return self.get_query_params().get('MuteTime')
def set_MuteTime(self,MuteTime):
self.add_query_param('MuteTime',MuteTime)
def get_BackgroundFileCode(self):
return self.get_query_params().get('BackgroundFileCode')
def set_BackgroundFileCode(self,BackgroundFileCode):
self.add_query_param('BackgroundFileCode',BackgroundFileCode)
def get_OutId(self):
return self.get_query_params().get('OutId')
def set_OutId(self,OutId):
self.add_query_param('OutId',OutId)
def get_AsrModelId(self):
return self.get_query_params().get('AsrModelId')
def set_AsrModelId(self,AsrModelId):
self.add_query_param('AsrModelId',AsrModelId)
def get_PauseTime(self):
return self.get_query_params().get('PauseTime')
def set_PauseTime(self,PauseTime):
self.add_query_param('PauseTime',PauseTime)
def get_TtsStyle(self):
return self.get_query_params().get('TtsStyle')
def set_TtsStyle(self,TtsStyle):
        self.add_query_param('TtsStyle',TtsStyle)
1,140 | # Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.
from NeoVintageous.tests import unittest
class Test_dollar(unittest.FunctionalTestCase):
    def test_n(self):
self.eq('one |two three', 'n_$', 'one two thre|e')
self.eq('one |two three\nfour', 'n_2$', 'one two three\nfou|r')
self.eq('|abc\nabc\n', 'n_$', 'ab|c\nabc\n')
self.eq('|abc\nabc\nabc\nabc\nabc\nabc\nabc\nabc\nabc\nabc\n', 'n_5$', 'abc\nabc\nabc\nabc\nab|c\nabc\nabc\nabc\nabc\nabc\n') # noqa: E501
self.eq('abc\n|\nabc\n', 'n_$', 'abc\n|\nabc\n')
self.eq('|', 'n_$', '|')
self.eq('a|b1\ncd2\nef3\ngh4', 'n_1$', 'ab|1\ncd2\nef3\ngh4')
self.eq('a|b1\ncd2\nef3\ngh4', 'n_2$', 'ab1\ncd|2\nef3\ngh4')
self.eq('a|b1\ncd2\nef3\ngh4', 'n_3$', 'ab1\ncd2\nef|3\ngh4')
self.eq('a|b1\ncd2\nef3\ngh4', 'n_4$', 'ab1\ncd2\nef3\ngh|4')
self.eq('fi|zz \nbuzz', 'n_$', 'fizz | \nbuzz')
self.eq('\n\n|\n\n\n', 'n_$', '\n\n|\n\n\n')
self.eq('\n\n|\n\n\n', 'n_1$', '\n\n|\n\n\n')
self.eq('\n\n|\n\n\n', 'n_2$', '\n\n\n|\n\n')
self.eq('\n\n|\n\n\n', 'n_3$', '\n\n\n\n|\n')
def test_v(self):
self.eq('one |two three', 'v_$', 'one |two three|')
self.eq('one |two three\nfour', 'v_$', 'one |two three\n|four')
self.eq('one |two three\nfour', 'v_2$', 'one |two three\nfour|')
self.eq('|abc\nabc\n', 'v_$', '|abc\n|abc\n')
self.eq('|abc\nabc\nabc\nabc\nabc\nabc\nabc\nabc\nabc\nabc\n', 'v_5$', '|abc\nabc\nabc\nabc\nabc\n|abc\nabc\nabc\nabc\nabc\n') # noqa: E501
self.eq('abc\n|\n|abc\n', 'v_$', 'abc\n|\n|abc\n')
self.eq('r_a|bc\nab|c\n', 'v_$', 'r_abc|\nab|c\n')
self.eq('ab|c|\nabc\n', 'v_$', 'ab|c\n|abc\n')
self.eq('r_abc\n|a|bc\n', 'v_2$', 'abc\n|abc\n|')
self.eq('r_|abc|\nxy', 'v_$', 'ab|c\n|xy')
self.eq('a|b1\ncd2\nef3\ngh4', 'v_1$', 'a|b1\n|cd2\nef3\ngh4')
self.eq('a|b1\ncd2\nef3\ngh4', 'v_2$', 'a|b1\ncd2\n|ef3\ngh4')
self.eq('a|b1\ncd2\nef3\ngh4', 'v_3$', 'a|b1\ncd2\nef3\n|gh4')
self.eq('a|b1\ncd2\nef3\ngh4', 'v_4$', 'a|b1\ncd2\nef3\ngh4|')
self.eq('fi|zz \nbuzz', 'v_$', 'fi|zz \n|buzz')
def test_V(self):
self.eq('|abc\n|abc\nabc\n', 'V_$', '|abc\n|abc\nabc\n')
self.eq('1\n|fizz\n|buzz\nfour\nfive', 'V_2$', '1\n|fizz\nbuzz\n|four\nfive')
self.eq('1\n|fizz\n|buzz\nfour\nfive', 'V_3$', '1\n|fizz\nbuzz\nfour\n|five')
self.eq('1\n|fizz\n|buzz\nfour\nfive', 'V_4$', '1\n|fizz\nbuzz\nfour\nfive|')
self.eq('1\n|fizz\n|buzz\nfour\nfive', 'V_9$', '1\n|fizz\nbuzz\nfour\nfive|')
self.eq('r_1\n|fizz\nbuzz\n|four\nfive', 'V_2$', 'r_1\nfizz\n|buzz\n|four\nfive')
self.eq('r_1\n|fizz\nbuzz\n|four\nfive', 'V_3$', '1\nfizz\n|buzz\nfour\n|five')
def test_b(self):
self.eq('x\n|fizz| buzz\n|fizz| buzz\ny', 'b_$', 'x\n|fizz buzz\n||fizz buzz\n|y')
@unittest.mock_bell()
def test_c(self):
self.eq('fi|zz', 'c$', 'i_fi|')
self.eq('fi|zz\n', 'c$', 'i_fi|\n')
self.eq('fi|zz\nbuzz', 'c$', 'i_fi|\nbuzz')
self.eq('fizz\n|\nbuzz', 'c$', 'i_fizz\n|\nbuzz')
self.eq('\n\n|\n\n', 'c$', 'i_\n\n|\n\n')
self.eq('|', 'c$', 'i_|')
self.eq('fi|zz ', 'c$', 'i_fi|')
self.eq('fi|zz \n', 'c$', 'i_fi|\n')
self.eq('fi|zz \nbuzz', 'c$', 'i_fi|\nbuzz')
self.eq('fizz | \nbuzz', 'c$', 'i_fizz |\nbuzz')
self.assertNoBell()
@unittest.mock_bell()
def test_d(self):
self.eq('one |two three', 'd$', 'one| ')
self.eq('one t|wo three', 'd$', 'one |t')
self.eq('|abc\nabc\n', 'd$', '|\nabc\n')
self.eq('|abc\nabc\nabc\nabc\n', '3d$', '|abc\n')
self.eq('|fizz\nbuzz\n', 'd$', '|\nbuzz\n')
self.eq('|ab1\nab2\nab3\nab4\n', '3d$', '|ab4\n')
self.eq('123|4', 'd$', '12|3')
self.eq('123|4\nfizz', 'd$', '12|3\nfizz')
self.eq('|ab1\ncd2\nef3\ngh4', 'd$', '|\ncd2\nef3\ngh4')
self.eq('a|b1\ncd2\nef3\ngh4', 'd$', '|a\ncd2\nef3\ngh4')
self.eq('fi|zz\nfuzz\nabc\ndef', '2d$', 'f|i\nabc\ndef')
self.eq('fi|zz\nfuzz\nabc\ndef', '2d$', 'f|i\nabc\ndef')
self.eq('fi|zz\nfuzz\nabc\ndef', '3d$', 'f|i\ndef')
self.eq('first1\nst|art2\nxxx\nfour\nfive', '2d$', 'first1\ns|t\nfour\nfive')
self.eq('first1\n|start2\n\n\nfive', '2d$', 'first1\n|\nfive')
self.eq('fi|zz \nbuzz', 'd$', 'f|i\nbuzz')
self.assertNoBell()
self.eq('\n\n|\n\n', 'd$', '\n\n|\n\n')
self.eq('fizz\n|\n\n\n\nbuzz', 'd$', 'fizz\n|\n\n\n\nbuzz')
self.eq('fizz\n|\n\n\n\nbuzz', '1d$', 'fizz\n|\n\n\n\nbuzz')
self.eq('fizz\n|\n\n\n\nbuzz', '2d$', 'fizz\n|\n\nbuzz')
self.eq('fizz\n|\n\n\n\nbuzz', '3d$', 'fizz\n|\nbuzz') | null |
1,141 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkslb.endpoint import endpoint_data
class CreateLoadBalancerUDPListenerRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Slb', '2014-05-15', 'CreateLoadBalancerUDPListener','slb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_AclStatus(self): # String
return self.get_query_params().get('AclStatus')
def set_AclStatus(self, AclStatus): # String
self.add_query_param('AclStatus', AclStatus)
def get_AclType(self): # String
return self.get_query_params().get('AclType')
def set_AclType(self, AclType): # String
self.add_query_param('AclType', AclType)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_VServerGroupId(self): # String
return self.get_query_params().get('VServerGroupId')
def set_VServerGroupId(self, VServerGroupId): # String
self.add_query_param('VServerGroupId', VServerGroupId)
def get_AclId(self): # String
return self.get_query_params().get('AclId')
def set_AclId(self, AclId): # String
self.add_query_param('AclId', AclId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_MasterSlaveServerGroupId(self): # String
return self.get_query_params().get('MasterSlaveServerGroupId')
def set_MasterSlaveServerGroupId(self, MasterSlaveServerGroupId): # String
self.add_query_param('MasterSlaveServerGroupId', MasterSlaveServerGroupId)
def get_healthCheckReq(self): # String
return self.get_query_params().get('healthCheckReq')
def set_healthCheckReq(self, healthCheckReq): # String
self.add_query_param('healthCheckReq', healthCheckReq)
def get_BackendServerPort(self): # Integer
return self.get_query_params().get('BackendServerPort')
def set_BackendServerPort(self, BackendServerPort): # Integer
self.add_query_param('BackendServerPort', BackendServerPort)
def get_healthCheckInterval(self): # Integer
return self.get_query_params().get('healthCheckInterval')
def set_healthCheckInterval(self, healthCheckInterval): # Integer
self.add_query_param('healthCheckInterval', healthCheckInterval)
def get_healthCheckExp(self): # String
return self.get_query_params().get('healthCheckExp')
def set_healthCheckExp(self, healthCheckExp): # String
self.add_query_param('healthCheckExp', healthCheckExp)
def get_ProxyProtocolV2Enabled(self): # Boolean
return self.get_query_params().get('ProxyProtocolV2Enabled')
def set_ProxyProtocolV2Enabled(self, ProxyProtocolV2Enabled): # Boolean
self.add_query_param('ProxyProtocolV2Enabled', ProxyProtocolV2Enabled)
def get_HealthCheckSwitch(self): # String
return self.get_query_params().get('HealthCheckSwitch')
def set_HealthCheckSwitch(self, HealthCheckSwitch): # String
self.add_query_param('HealthCheckSwitch', HealthCheckSwitch)
def get_HealthCheckConnectTimeout(self): # Integer
return self.get_query_params().get('HealthCheckConnectTimeout')
def set_HealthCheckConnectTimeout(self, HealthCheckConnectTimeout): # Integer
self.add_query_param('HealthCheckConnectTimeout', HealthCheckConnectTimeout)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_UnhealthyThreshold(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
    def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def get_ListenerPort(self): # Integer
return self.get_query_params().get('ListenerPort')
def set_ListenerPort(self, ListenerPort): # Integer
self.add_query_param('ListenerPort', ListenerPort)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_Bandwidth(self): # Integer
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self, Bandwidth): # Integer
self.add_query_param('Bandwidth', Bandwidth)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_HealthCheckConnectPort(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort) | null |
1,142 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbase.endpoint import endpoint_data
class RelateDbForHBaseHaRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'HBase', '2019-01-01', 'RelateDbForHBaseHa','hbase')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_HaMigrateType(self):
return self.get_query_params().get('HaMigrateType')
def set_HaMigrateType(self,HaMigrateType):
self.add_query_param('HaMigrateType',HaMigrateType)
def get_HaActiveHdfsUri(self):
return self.get_query_params().get('HaActiveHdfsUri')
def set_HaActiveHdfsUri(self,HaActiveHdfsUri):
self.add_query_param('HaActiveHdfsUri',HaActiveHdfsUri)
def get_HaStandbyVersion(self):
return self.get_query_params().get('HaStandbyVersion')
def set_HaStandbyVersion(self,HaStandbyVersion):
self.add_query_param('HaStandbyVersion',HaStandbyVersion)
def get_IsStandbyStandard(self):
return self.get_query_params().get('IsStandbyStandard')
def set_IsStandbyStandard(self,IsStandbyStandard):
self.add_query_param('IsStandbyStandard',IsStandbyStandard)
def get_HaActiveClusterKey(self):
return self.get_query_params().get('HaActiveClusterKey')
def set_HaActiveClusterKey(self,HaActiveClusterKey):
self.add_query_param('HaActiveClusterKey',HaActiveClusterKey)
def get_HaStandbyPassword(self):
return self.get_query_params().get('HaStandbyPassword')
def set_HaStandbyPassword(self,HaStandbyPassword):
self.add_query_param('HaStandbyPassword',HaStandbyPassword)
    def get_HaStandbyClusterKey(self):
return self.get_query_params().get('HaStandbyClusterKey')
def set_HaStandbyClusterKey(self,HaStandbyClusterKey):
self.add_query_param('HaStandbyClusterKey',HaStandbyClusterKey)
def get_HaStandbyHbaseFsDir(self):
return self.get_query_params().get('HaStandbyHbaseFsDir')
def set_HaStandbyHbaseFsDir(self,HaStandbyHbaseFsDir):
self.add_query_param('HaStandbyHbaseFsDir',HaStandbyHbaseFsDir)
def get_HaActiveHbaseFsDir(self):
return self.get_query_params().get('HaActiveHbaseFsDir')
def set_HaActiveHbaseFsDir(self,HaActiveHbaseFsDir):
self.add_query_param('HaActiveHbaseFsDir',HaActiveHbaseFsDir)
def get_HaActiveDBType(self):
return self.get_query_params().get('HaActiveDBType')
def set_HaActiveDBType(self,HaActiveDBType):
self.add_query_param('HaActiveDBType',HaActiveDBType)
def get_HaActivePassword(self):
return self.get_query_params().get('HaActivePassword')
def set_HaActivePassword(self,HaActivePassword):
self.add_query_param('HaActivePassword',HaActivePassword)
def get_IsActiveStandard(self):
return self.get_query_params().get('IsActiveStandard')
def set_IsActiveStandard(self,IsActiveStandard):
self.add_query_param('IsActiveStandard',IsActiveStandard)
def get_HaStandbyUser(self):
return self.get_query_params().get('HaStandbyUser')
def set_HaStandbyUser(self,HaStandbyUser):
self.add_query_param('HaStandbyUser',HaStandbyUser)
def get_HaActive(self):
return self.get_query_params().get('HaActive')
def set_HaActive(self,HaActive):
self.add_query_param('HaActive',HaActive)
def get_HaStandby(self):
return self.get_query_params().get('HaStandby')
def set_HaStandby(self,HaStandby):
self.add_query_param('HaStandby',HaStandby)
def get_HaStandbyHdfsUri(self):
return self.get_query_params().get('HaStandbyHdfsUri')
def set_HaStandbyHdfsUri(self,HaStandbyHdfsUri):
self.add_query_param('HaStandbyHdfsUri',HaStandbyHdfsUri)
def get_HaActiveVersion(self):
return self.get_query_params().get('HaActiveVersion')
def set_HaActiveVersion(self,HaActiveVersion):
self.add_query_param('HaActiveVersion',HaActiveVersion)
def get_ClusterId(self):
return self.get_query_params().get('ClusterId')
def set_ClusterId(self,ClusterId):
self.add_query_param('ClusterId',ClusterId)
def get_HaStandbyDBType(self):
return self.get_query_params().get('HaStandbyDBType')
def set_HaStandbyDBType(self,HaStandbyDBType):
self.add_query_param('HaStandbyDBType',HaStandbyDBType)
def get_HaTables(self):
return self.get_query_params().get('HaTables')
def set_HaTables(self,HaTables):
self.add_query_param('HaTables',HaTables)
def get_HaActiveUser(self):
return self.get_query_params().get('HaActiveUser')
def set_HaActiveUser(self,HaActiveUser):
        self.add_query_param('HaActiveUser',HaActiveUser)
1,143 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CreateHostAvailabilityRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cms', '2019-01-01', 'CreateHostAvailability','cms')
self.set_method('POST')
def get_TaskOptionHttpMethod(self): # String
return self.get_query_params().get('TaskOption.HttpMethod')
def set_TaskOptionHttpMethod(self, TaskOptionHttpMethod): # String
self.add_query_param('TaskOption.HttpMethod', TaskOptionHttpMethod)
def get_TaskOptionHttpHeader(self): # String
return self.get_query_params().get('TaskOption.HttpHeader')
def set_TaskOptionHttpHeader(self, TaskOptionHttpHeader): # String
self.add_query_param('TaskOption.HttpHeader', TaskOptionHttpHeader)
def get_AlertConfigEscalationLists(self): # RepeatList
return self.get_query_params().get('AlertConfigEscalationList')
def set_AlertConfigEscalationLists(self, AlertConfigEscalationList): # RepeatList
for depth1 in range(len(AlertConfigEscalationList)):
if AlertConfigEscalationList[depth1].get('Times') is not None:
self.add_query_param('AlertConfigEscalationList.' + str(depth1 + 1) + '.Times', AlertConfigEscalationList[depth1].get('Times'))
if AlertConfigEscalationList[depth1].get('MetricName') is not None:
self.add_query_param('AlertConfigEscalationList.' + str(depth1 + 1) + '.MetricName', AlertConfigEscalationList[depth1].get('MetricName'))
if AlertConfigEscalationList[depth1].get('Value') is not None:
self.add_query_param('AlertConfigEscalationList.' + str(depth1 + 1) + '.Value', AlertConfigEscalationList[depth1].get('Value'))
if AlertConfigEscalationList[depth1].get('Operator') is not None:
self.add_query_param('AlertConfigEscalationList.' + str(depth1 + 1) + '.Operator', AlertConfigEscalationList[depth1].get('Operator'))
if AlertConfigEscalationList[depth1].get('Aggregate') is not None:
self.add_query_param('AlertConfigEscalationList.' + str(depth1 + 1) + '.Aggregate', AlertConfigEscalationList[depth1].get('Aggregate'))
def get_TaskName(self): # String
return self.get_query_params().get('TaskName')
def set_TaskName(self, TaskName): # String
self.add_query_param('TaskName', TaskName)
def get_AlertConfigSilenceTime(self): # Integer
return self.get_query_params().get('AlertConfig.SilenceTime')
def set_AlertConfigSilenceTime(self, AlertConfigSilenceTime): # Integer
self.add_query_param('AlertConfig.SilenceTime', AlertConfigSilenceTime)
def get_TaskOptionHttpResponseCharset(self): # String
return self.get_query_params().get('TaskOption.HttpResponseCharset')
def set_TaskOptionHttpResponseCharset(self, TaskOptionHttpResponseCharset): # String
self.add_query_param('TaskOption.HttpResponseCharset', TaskOptionHttpResponseCharset)
    def get_TaskOptionHttpNegative(self): # Boolean
return self.get_query_params().get('TaskOption.HttpNegative')
def set_TaskOptionHttpNegative(self, TaskOptionHttpNegative): # Boolean
self.add_query_param('TaskOption.HttpNegative', TaskOptionHttpNegative)
def get_TaskOptionInterval(self): # Integer
return self.get_query_params().get('TaskOption.Interval')
def set_TaskOptionInterval(self, TaskOptionInterval): # Integer
self.add_query_param('TaskOption.Interval', TaskOptionInterval)
def get_AlertConfigNotifyType(self): # Integer
return self.get_query_params().get('AlertConfig.NotifyType')
def set_AlertConfigNotifyType(self, AlertConfigNotifyType): # Integer
self.add_query_param('AlertConfig.NotifyType', AlertConfigNotifyType)
def get_TaskOptionTelnetOrPingHost(self): # String
return self.get_query_params().get('TaskOption.TelnetOrPingHost')
def set_TaskOptionTelnetOrPingHost(self, TaskOptionTelnetOrPingHost): # String
self.add_query_param('TaskOption.TelnetOrPingHost', TaskOptionTelnetOrPingHost)
def get_TaskOptionHttpResponseMatchContent(self): # String
return self.get_query_params().get('TaskOption.HttpResponseMatchContent')
def set_TaskOptionHttpResponseMatchContent(self, TaskOptionHttpResponseMatchContent): # String
self.add_query_param('TaskOption.HttpResponseMatchContent', TaskOptionHttpResponseMatchContent)
def get_InstanceLists(self): # RepeatList
return self.get_query_params().get('InstanceList')
def set_InstanceLists(self, InstanceList): # RepeatList
for depth1 in range(len(InstanceList)):
self.add_query_param('InstanceList.' + str(depth1 + 1), InstanceList[depth1])
def get_TaskType(self): # String
return self.get_query_params().get('TaskType')
def set_TaskType(self, TaskType): # String
self.add_query_param('TaskType', TaskType)
def get_GroupId(self): # Long
return self.get_query_params().get('GroupId')
def set_GroupId(self, GroupId): # Long
self.add_query_param('GroupId', GroupId)
def get_AlertConfigEndTime(self): # Integer
return self.get_query_params().get('AlertConfig.EndTime')
def set_AlertConfigEndTime(self, AlertConfigEndTime): # Integer
self.add_query_param('AlertConfig.EndTime', AlertConfigEndTime)
def get_TaskOptionHttpURI(self): # String
return self.get_query_params().get('TaskOption.HttpURI')
def set_TaskOptionHttpURI(self, TaskOptionHttpURI): # String
self.add_query_param('TaskOption.HttpURI', TaskOptionHttpURI)
def get_TaskScope(self): # String
return self.get_query_params().get('TaskScope')
def set_TaskScope(self, TaskScope): # String
self.add_query_param('TaskScope', TaskScope)
def get_TaskOptionHttpPostContent(self): # String
return self.get_query_params().get('TaskOption.HttpPostContent')
def set_TaskOptionHttpPostContent(self, TaskOptionHttpPostContent): # String
self.add_query_param('TaskOption.HttpPostContent', TaskOptionHttpPostContent)
def get_AlertConfigStartTime(self): # Integer
return self.get_query_params().get('AlertConfig.StartTime')
def set_AlertConfigStartTime(self, AlertConfigStartTime): # Integer
self.add_query_param('AlertConfig.StartTime', AlertConfigStartTime)
def get_AlertConfigWebHook(self): # String
return self.get_query_params().get('AlertConfig.WebHook')
def set_AlertConfigWebHook(self, AlertConfigWebHook): # String
self.add_query_param('AlertConfig.WebHook', AlertConfigWebHook) | null |
1,144 | # Copyright (c) 2014, 2016, 2018-2019 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2003-2005 The Regents of The University of Michigan
# Copyright (c) 2013,2015 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
###################
# Utility functions
#
# Indent every line in string 's' by two spaces
# (except preprocessor directives).
# Used to make nested code blocks look pretty.
#
def indent(s):
    return re.sub(r"(?m)^(?!#)", "  ", s)
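# For example (illustrative call):
#   indent("x = 1;\n#define FOO")
# returns "  x = 1;\n#define FOO" -- the code line gains two spaces while the
# preprocessor directive stays at column zero.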
# Regular expression object to match C++ strings
stringRE = re.compile(r'"([^"\\]|\\.)*"')
# Regular expression object to match C++ comments
# (used in findOperands())
commentRE = re.compile(
r"(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?",
re.DOTALL | re.MULTILINE,
)
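# Illustratively, commentRE matches both comment forms in C++ code: the
# "/* ... */" span in "a /* note */ b" and the "// ..." tail of
# "x = 1; // note"; the anchoring groups record whether the comment starts
# and/or ends its line.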
# Regular expression object to match assignment statements (used in
# findOperands()). If the code immediately following the first
# appearance of the operand matches this regex, then the operand
# appears to be on the LHS of an assignment, and is thus a
# destination. basically we're looking for an '=' that's not '=='.
# The heinous tangle before that handles the case where the operand
# has an array subscript.
assignRE = re.compile(r"(\[[^\]]+\])?\s*=(?!=)", re.MULTILINE)
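# For instance, matched against the text right after an operand, "[EA] = Rb"
# satisfies assignRE (array subscript, then a single '='), so the operand is a
# destination; " == Rb" does not, because of the (?!=) lookahead.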
#
# Munge a somewhat arbitrarily formatted piece of Python code
# (e.g. from a format 'let' block) into something whose indentation
# will get by the Python parser.
#
# The two keys here are that Python will give a syntax error if
# there's any whitespace at the beginning of the first line, and that
# all lines at the same lexical nesting level must have identical
# indentation. Unfortunately the way code literals work, an entire
# let block tends to have some initial indentation. Rather than
# trying to figure out what that is and strip it off, we prepend 'if
# 1:' to make the let code the nested block inside the if (and have
# the parser automatically deal with the indentation for us).
#
# We don't want to do this if (1) the code block is empty or (2) the
# first line of the block doesn't have any whitespace at the front.
def fixPythonIndentation(s):
# get rid of blank lines first
s = re.sub(r"(?m)^\s*\n", "", s)
if s != "" and re.match(r"[ \t]", s[0]):
s = "if 1:\n" + s
return s
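# Editor's sketch (not part of the original parser): a hypothetical
# indented 'let' block made parseable by fixPythonIndentation.
def _fix_indentation_example():
    raw = "    x = 1\n    y = 2\n"
    fixed = fixPythonIndentation(raw)
    # The block is now nested under "if 1:", so the Python parser accepts it.
    assert fixed == "if 1:\n    x = 1\n    y = 2\n"
    exec(fixed)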
class ISAParserError(Exception):
"""Exception class for parser errors"""
def __init__(self, first, second=None):
if second is None:
self.lineno = 0
self.string = first
else:
self.lineno = first
self.string = second
def __str__(self):
return self.string
def error(*args):
raise ISAParserError(*args)
def protectNonSubstPercents(s):
"""Protect any non-dict-substitution '%'s in a format string
(i.e. those not followed by '(')"""
return re.sub(r"%(?!\()", "%%", s)
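# Editor's sketch: a lone '%' is doubled so that a later dict-style
# substitution leaves it intact (hypothetical format string).
def _protect_percents_example():
    out = protectNonSubstPercents("50% done: %(stage)s")
    assert out == "50%% done: %(stage)s"
    assert out % {"stage": "decode"} == "50% done: decode"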
##############
# Stack: a simple stack object. Used for both formats (formatStack)
# and default cases (defaultStack). Simply wraps a list to give more
# stack-like syntax and enable initialization with an argument list
# (as opposed to an argument that's a list).
class Stack(list):
def __init__(self, *items):
list.__init__(self, items)
def push(self, item):
self.append(item)
def top(self):
return self[-1]
# Format a file include stack backtrace as a string
def backtrace(filename_stack):
fmt = "In file included from %s:"
return "\n".join([fmt % f for f in filename_stack])
#######################
#
# LineTracker: track filenames along with line numbers in PLY lineno fields
# PLY explicitly doesn't do anything with 'lineno' except propagate
# it. This class lets us tie filenames to line numbers with a
# minimum of disruption to existing increment code.
#
class LineTracker(object):
def __init__(self, filename, lineno=1):
self.filename = filename
self.lineno = lineno
# Overload '+=' for increments. We need to create a new object on
# each update else every token ends up referencing the same
# constantly incrementing instance.
def __iadd__(self, incr):
return LineTracker(self.filename, self.lineno + incr)
def __str__(self):
return "%s:%d" % (self.filename, self.lineno)
# In case there are places where someone really expects a number
def __int__(self):
return self.lineno | null |
1,145 | # Copyright (c) 2017-2022 The Molecular Sciences Software Institute, Virginia Tech
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
'''
Computes the difference between basis sets and files
'''
import copy
from .compare import compare_electron_shells
from .. import fileio
def subtract_electron_shells(s1, s2, rel_tol=0.0):
"""
    Returns the difference between two lists of electron shells (s1 - s2).
    This removes from s1 any shells that are also in s2, within a tolerance.
"""
diff_shells = []
for sh1 in s1:
for sh2 in s2:
if compare_electron_shells(sh1, sh2, rel_tol=rel_tol):
break
else:
diff_shells.append(copy.deepcopy(sh1))
return diff_shells
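# Editor's sketch (hypothetical shell data; the real schema is the BSE
# JSON shell format used throughout this package): identical shells
# cancel, while shells unique to s1 survive.
def _subtract_shells_example():
    sh_s = {'function_type': 'gto', 'region': '', 'angular_momentum': [0],
            'exponents': ['13.01'], 'coefficients': [['0.0196850']]}
    sh_p = {'function_type': 'gto', 'region': '', 'angular_momentum': [1],
            'exponents': ['0.8000'], 'coefficients': [['1.0000000']]}
    assert subtract_electron_shells([sh_s, sh_p], [sh_p]) == [sh_s]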
def METHOD_NAME(left_list, right_list):
'''
Compute the difference between two sets of basis set dictionaries
The result is a list of dictionaries that correspond to each dictionary in
`left_list`. Each resulting dictionary will contain only the elements/shells
that exist in that entry and not in any of the dictionaries in `right_list`.
This only works on the shell level, and will only subtract entire shells
that are identical. ECP potentials are not affected.
    The return value contains deep copies of the input data.
Parameters
----------
left_list : list of dict
Dictionaries to use as the base
right_list : list of dict
Dictionaries of basis data to subtract from each dictionary of `left_list`
Returns
----------
list
Each object in `left_list` containing data that does not appear in `right_list`
'''
ret = []
for bs1 in left_list:
res = copy.deepcopy(bs1)
for bs2 in right_list:
for el in res['elements'].keys():
if el not in bs2['elements']:
                    continue  # Element exists only in the left entry
eldata1 = res['elements'][el]
eldata2 = bs2['elements'][el]
s1 = eldata1['electron_shells']
s2 = eldata2['electron_shells']
eldata1['electron_shells'] = subtract_electron_shells(s1, s2)
# Remove any empty elements
res['elements'] = {k: v for k, v in res['elements'].items() if v['electron_shells']}
ret.append(res)
return ret
def diff_json_files(left_files, right_files):
'''
Compute the difference between two sets of basis set JSON files
The output is a set of files that correspond to each file in
`left_files`. Each resulting dictionary will contain only the elements/shells
that exist in that entry and not in any of the files in `right_files`.
This only works on the shell level, and will only subtract entire shells
that are identical. ECP potentials are not affected.
`left_files` and `right_files` are lists of file paths. The output
is written to files with the same names as those in `left_files`,
but with `.diff` added to the end. If those files exist, they are overwritten.
Parameters
----------
left_files : list of str
Paths to JSON files to use as the base
right_files : list of str
Paths to JSON files to subtract from each file of `left_files`
Returns
----------
None
'''
left_data = [fileio.read_json_basis(x) for x in left_files]
right_data = [fileio.read_json_basis(x) for x in right_files]
d = METHOD_NAME(left_data, right_data)
for idx, diff_bs in enumerate(d):
fpath = left_files[idx]
fileio.write_json_basis(fpath + '.diff', diff_bs) | null |
1,146 | # Copyright 2017-2023 Posit Software, PBC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from guild import cli
from guild import guildfile
from guild import util
from guild import yaml_util
log = logging.getLogger("guild")
class _ImportedFlagsOpDefProxy:
def __init__(self, flags_data, wrapped_opdef):
self.guildfile = wrapped_opdef.guildfile
self.flags = self._init_flags(flags_data, wrapped_opdef.main)
def _init_flags(self, flags_data, main_mod):
flags = []
for name, flag_data in flags_data.items():
try:
flag_data = guildfile.coerce_flag_data(name, flag_data, self.guildfile)
except guildfile.GuildfileError as e:
if os.getenv("NO_WARN_FLAGS_IMPORT") != "1":
log.warning("cannot import flags from %s: %s", main_mod, e)
else:
flags.append(guildfile.FlagDef(name, flag_data, self))
return flags
def flag_values(self):
return {f.name: f.default for f in self.flags}
def apply_flags(opdef, import_flags_data_cb, apply_flags_data_cb=None):
log_flags_info("### Script flags for %s", opdef.fullname)
if _flags_import_disabled(opdef):
log_flags_info("flags import disabled - skipping")
return
import_all_marker = object()
to_import = _flags_to_import(opdef, import_all_marker)
to_skip = METHOD_NAME(opdef)
try:
flags_data = import_flags_data_cb()
except Exception as e:
if log.getEffectiveLevel() <= logging.DEBUG:
log.exception(repr(import_flags_data_cb))
log.warning(e)
else:
if apply_flags_data_cb:
apply_flags_data_cb(flags_data)
import_data = {
name: flags_data[name]
for name in flags_data if (
(to_import is import_all_marker or name in to_import)
            and name not in to_skip
)
}
opdef.merge_flags(_ImportedFlagsOpDefProxy(import_data, opdef))
def log_flags_info(fmt, *args):
if os.getenv("FLAGS_TEST") == "1":
# pylint: disable=consider-using-generator
fmt_args = tuple([_fmt_arg(arg) for arg in args])
cli.note(fmt % fmt_args)
def _fmt_arg(arg):
if isinstance(arg, tuple):
return arg[0](*arg[1:])
return arg
def _flags_import_disabled(opdef):
return opdef.flags_import in (False, [])
def _flags_to_import(opdef, all_marker):
if opdef.flags_import in (True, "all"):
return all_marker
if opdef.flags_import is None:
# If flags-import is not configured, import all defined flags.
return {flag.name for flag in opdef.flags}
if isinstance(opdef.flags_import, list):
return set(opdef.flags_import)
return set([opdef.flags_import])
def METHOD_NAME(opdef):
if opdef.flags_import_skip:
return set(opdef.flags_import_skip)
return set()
def flag_data_for_val(val):
return {
"default": _flag_default(val),
"type": _flag_type(val),
"arg-split": _flag_arg_split(val),
}
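# Editor's sketch: how a list-valued flag is encoded (assumes the yaml
# and shlex helpers below behave as used here; the value is hypothetical).
def _flag_data_for_val_example():
    # Lists are flattened to a shell-splittable string, have no scalar
    # type, and are marked arg-split.
    assert flag_data_for_val([1, 2]) == {
        "default": "1 2",
        "type": None,
        "arg-split": True,
    }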
def _flag_default(val):
if isinstance(val, list):
return _encode_splittable_list(val)
return val
def _encode_splittable_list(l):
return " ".join([util.shlex_quote(yaml_util.encode_yaml(x)) for x in l])
def _flag_type(val):
if isinstance(val, str):
return "string"
if isinstance(val, bool):
return "boolean"
if isinstance(val, (int, float)):
return "number"
return None
def _flag_arg_split(val):
return True if isinstance(val, list) else None | null |
1,147 | import numpy as np
from Orange.data import Table
from Orange.widgets import widget, gui
from Orange.widgets.utils import itemmodels
from Orange.widgets.utils.localization import pl
from Orange.widgets.utils.sql import check_sql_input
from Orange.widgets.utils.widgetpreview import WidgetPreview
from Orange.widgets.widget import Input, Output
from Orange.widgets.utils.annotated_data import (create_annotated_table)
class OWSelectByDataIndex(widget.OWWidget):
name = "Select by Data Index"
description = "Match instances by index from data subset."
category = "Transform"
icon = "icons/SelectByDataIndex.svg"
priority = 1112
class Inputs:
data = Input("Data", Table)
data_subset = Input("Data Subset", Table)
class Outputs:
matching_data = Output("Matching Data", Table, replaces=["Data"], default=True)
non_matching_data = Output("Unmatched Data", Table)
# avoiding the default annotated output name (Data), as it was used
# for Matching Data previously
annotated_data = Output("Annotated Data", Table)
want_main_area = False
buttons_area_orientation = None
resizing_enabled = False
class Warning(widget.OWWidget.Warning):
instances_not_matching = widget.Msg("Input tables do not share any instances.")
def __init__(self):
super().__init__()
self.data = None
self.data_subset = None
self.model = itemmodels.VariableListModel()
self.model_unique_with_id = itemmodels.VariableListModel()
self.extra_model_unique = itemmodels.VariableListModel()
self.extra_model_unique_with_id = itemmodels.VariableListModel()
box = gui.widgetBox(self.controlArea, True)
gui.label(
box, self, """
Data rows keep their identity even when some or all original variables
are replaced by variables computed from the original ones.
This widget gets two data tables ("Data" and "Data Subset") that
can be traced back to the same source. It selects all rows from Data
that appear in Data Subset, based on row identity and not actual data.
""".strip(), box=True)
@Inputs.data
@check_sql_input
def set_data(self, data):
self.data = data
@Inputs.data_subset
@check_sql_input
def METHOD_NAME(self, data):
self.data_subset = data
def handleNewSignals(self):
self._invalidate()
def commit(self):
self.Warning.instances_not_matching.clear()
subset_ids = []
if self.data_subset:
subset_ids = self.data_subset.ids
if not self.data:
matching_output = None
non_matching_output = None
annotated_output = None
else:
if self.data_subset and \
not np.intersect1d(subset_ids, self.data.ids).size:
self.Warning.instances_not_matching()
row_sel = np.in1d(self.data.ids, subset_ids)
matching_output = self.data[row_sel]
non_matching_output = self.data[~row_sel]
annotated_output = create_annotated_table(self.data, row_sel)
self.Outputs.matching_data.send(matching_output)
self.Outputs.non_matching_data.send(non_matching_output)
self.Outputs.annotated_data.send(annotated_output)
def _invalidate(self):
self.commit()
def send_report(self):
def data_info_text(data):
if data is None:
return "No data."
nvars = len(data.domain.variables) + len(data.domain.metas)
return f"{data.name}, " \
f"{len(data)} {pl(len(data), 'instance')}, " \
f"{nvars} {pl(nvars, 'variable')}"
self.report_items("",
[("Data", data_info_text(self.data)),
("Data Subset", data_info_text(self.data_subset))])
if __name__ == "__main__": # pragma: no cover
iris = Table("iris.tab")
WidgetPreview(OWSelectByDataIndex).run(
set_data=iris,
METHOD_NAME=iris[:20]) | null |
1,148 | # Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.
from NeoVintageous.tests import unittest
from NeoVintageous.nv.vi.units import big_word_starts
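# Editor's note: big_word_starts(view, pt, count=1, internal=False)
# returns the text point of the next WORD start (whitespace-delimited,
# Vim's W) counting from pt; the `internal` flag selects the
# internal-normal-mode rules, as exercised by the tests below.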
class Test_big_word_starts_InNormalMode(unittest.ViewTestCase):
def test_move(self):
self.write('(foo) bar\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0), 6)
def test_move2(self):
self.write('(foo) bar fizz\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, count=2), 10)
def test_move3(self):
self.write(''.join(('(foo) bar\n',) * 5))
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, count=9), 46)
# We can assume the stuff tested for normal mode applies to internal normal mode, so we
# don't bother with that. Instead, we only test the differing behavior when advancing by
# word starts in internal normal.
class Test_big_word_starts_InInternalNormalMode_FromEmptyLine(unittest.ViewTestCase):
def test_move_to_line_with_leading_white_space(self):
self.write('\n (bar)\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True), 1)
def test_move_to_line_with_leading_white_space2(self):
self.write('\n(foo)\n \n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True, count=2), 7)
def test_move_to_whitespace_line(self):
self.write('\n \n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, count=1, internal=True), 1)
def test_move_to_one_word_line(self):
self.write('\n(foo)\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True, count=2), 7)
def test_move_and_swallow_last_newline_char(self):
self.write('\nfoo\n (bar)\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True, count=3), 12)
# We can assume the stuff tested for normal mode applies to internal normal mode, so we
# don't bother with that. Instead, we only test the differing behavior when advancing by
# word starts in internal normal.
class Test_big_word_starts_InInternalNormalMode_FromOneWordLine(unittest.ViewTestCase):
def test_move_to_eol(self):
self.write('foo\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True, count=1), 3)
def test_move_to_line_with_leading_white_space_from_word_start(self):
self.write('(foo)\n\nbar\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True, count=2), 7)
def test_move_to_empty_line_from_word(self):
self.write('(foo)\n\nbar\n')
self.select(1)
self.assertEqual(big_word_starts(self.view, 1, internal=True, count=2), 6)
def test_move_to_one_word_line_from_word_start(self):
self.write('(foo)\nbar\nccc\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True, count=2), 10)
def test_move_to_one_word_line_from_word(self):
self.write('(foo)\nbar\nccc\n')
self.select(1)
self.assertEqual(big_word_starts(self.view, 1, internal=True, count=2), 9)
def METHOD_NAME(self):
self.write('(foo)\n \nccc\n')
self.select(1)
self.assertEqual(big_word_starts(self.view, 1, internal=True, count=2), 12)
def test_move_to_whitespaceline_followed_by_leading_whitespace_from_word(self):
self.write('(foo)\n \n ccc\n')
self.select(1)
self.assertEqual(big_word_starts(self.view, 1, internal=True, count=2), 13)
def test_move_to_whitespaceline_followed_by_leading_whitespace_from_word_start(self):
self.write('(foo)\n \n ccc\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True, count=2), 14)
class Test_big_word_starts_InInternalNormalMode_FromLine(unittest.ViewTestCase):
def test_move_to_eol(self):
self.write('foo bar\n')
self.select(0)
self.assertEqual(big_word_starts(self.view, 0, internal=True, count=2), 7) | null |
1,149 | """
This type stub file was generated by pyright.
"""
from kombu.utils.objects import cached_property
"""Task request.
This module defines the :class:`Request` class, that specifies
how tasks are executed.
"""
__all__ = ("Request",)
IS_PYPY = ...
logger = ...
_does_info = ...
_does_debug = ...
def __optimize__(): ...
tz_or_local = ...
send_revoked = ...
send_retry = ...
task_accepted = ...
task_ready = ...
revoked_tasks = ...
class Request:
"""A request for task execution."""
acknowledged = ...
time_start = ...
worker_pid = ...
time_limits = ...
_already_revoked = ...
_already_cancelled = ...
_terminate_on_ack = ...
_apply_result = ...
_tzlocal = ...
    if not IS_PYPY:
__slots__ = ...
def __init__(
self,
message,
on_ack=...,
hostname=...,
eventer=...,
app=...,
connection_errors=...,
request_dict=...,
task=...,
on_reject=...,
body=...,
headers=...,
decoded=...,
utc=...,
maybe_make_aware=...,
maybe_iso8601=...,
**opts
) -> None: ...
@property
def delivery_info(self): ...
@property
def message(self): ...
@property
def request_dict(self): ...
@property
def body(self): ...
@property
def app(self): ...
@property
def utc(self): ...
@property
def METHOD_NAME(self): ...
@property
def content_encoding(self): ...
@property
def type(self): ...
@property
def root_id(self): ...
@property
def parent_id(self): ...
@property
def argsrepr(self): ...
@property
def args(self): ...
@property
def kwargs(self): ...
@property
def kwargsrepr(self): ...
@property
def on_ack(self): ...
@property
def on_reject(self): ...
@on_reject.setter
def on_reject(self, value): ...
@property
def hostname(self): ...
@property
def ignore_result(self): ...
@property
def eventer(self): ...
@eventer.setter
def eventer(self, eventer): ...
@property
def connection_errors(self): ...
@property
def task(self): ...
@property
def eta(self): ...
@property
def expires(self): ...
@expires.setter
def expires(self, value): ...
@property
def tzlocal(self): ...
@property
def store_errors(self): ...
@property
def task_id(self): ...
@task_id.setter
def task_id(self, value): ...
@property
def task_name(self): ...
@task_name.setter
def task_name(self, value): ...
@property
def reply_to(self): ...
@property
def replaced_task_nesting(self): ...
@property
def correlation_id(self): ...
def execute_using_pool(self, pool, **kwargs):
"""Used by the worker to send this task to the pool.
Arguments:
pool (~celery.concurrency.base.TaskPool): The execution pool
used to execute this request.
Raises:
celery.exceptions.TaskRevokedError: if the task was revoked.
"""
...
def execute(self, loglevel=..., logfile=...): # -> None:
"""Execute the task in a :func:`~celery.app.trace.trace_task`.
Arguments:
loglevel (int): The loglevel used by the task.
logfile (str): The logfile used by the task.
"""
...
def maybe_expire(self): # -> Literal[True] | None:
"""If expired, mark the task as revoked."""
...
def terminate(self, pool, signal=...): ...
def cancel(self, pool, signal=...): ...
def revoked(self): # -> bool:
"""If revoked, skip task and mark state."""
...
def send_event(self, type, **fields): ...
def on_accepted(self, pid, time_accepted): # -> None:
"""Handler called when task is accepted by worker pool."""
...
def on_timeout(self, soft, timeout): # -> None:
"""Handler called if the task times out."""
...
def on_success(self, failed__retval__runtime, **kwargs): # -> None:
"""Handler called if the task was successfully processed."""
...
def on_retry(self, exc_info): # -> None:
"""Handler called if the task should be retried."""
...
    def on_failure(self, exc_info, send_failed_event=..., return_ok=...):  # -> None:
"""Handler called if the task raised an exception."""
...
def acknowledge(self): # -> None:
"""Acknowledge task."""
...
def reject(self, requeue=...): ...
def info(self, safe=...): ...
def humaninfo(self): ...
def __str__(self) -> str:
"""``str(self)``."""
...
def __repr__(self): # -> str:
"""``repr(self)``."""
...
@cached_property
def chord(self): ...
@cached_property
def errbacks(self): ...
@cached_property
def group(self): ...
@cached_property
def group_index(self): ...
def create_request_cls(
base,
task,
pool,
hostname,
eventer,
ref=...,
revoked_tasks=...,
task_ready=...,
trace=...,
app=...,
): # -> Type[Request]:
class Request(base): ... | null |
1,150 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkretailcloud.endpoint import endpoint_data
class CreateClusterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'retailcloud', '2018-03-13', 'CreateCluster')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_BusinessCode(self): # String
return self.get_query_params().get('BusinessCode')
def set_BusinessCode(self, BusinessCode): # String
self.add_query_param('BusinessCode', BusinessCode)
def get_CreateWithLogIntegration(self): # Boolean
return self.get_query_params().get('CreateWithLogIntegration')
def set_CreateWithLogIntegration(self, CreateWithLogIntegration): # Boolean
self.add_query_param('CreateWithLogIntegration', CreateWithLogIntegration)
def get_Vswitchidss(self): # RepeatList
return self.get_query_params().get('Vswitchids')
def set_Vswitchidss(self, Vswitchids): # RepeatList
for depth1 in range(len(Vswitchids)):
self.add_query_param('Vswitchids.' + str(depth1 + 1), Vswitchids[depth1])
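    # Editor's note: RepeatList parameters flatten into indexed query
    # keys, e.g. set_Vswitchidss(['vsw-a', 'vsw-b']) yields
    # Vswitchids.1=vsw-a and Vswitchids.2=vsw-b (hypothetical IDs).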
def get_CloudMonitorFlags(self): # Integer
return self.get_query_params().get('CloudMonitorFlags')
def METHOD_NAME(self, CloudMonitorFlags): # Integer
self.add_query_param('CloudMonitorFlags', CloudMonitorFlags)
def get_ClusterEnvType(self): # String
return self.get_query_params().get('ClusterEnvType')
def set_ClusterEnvType(self, ClusterEnvType): # String
self.add_query_param('ClusterEnvType', ClusterEnvType)
def get_CreateWithArmsIntegration(self): # Boolean
return self.get_query_params().get('CreateWithArmsIntegration')
def set_CreateWithArmsIntegration(self, CreateWithArmsIntegration): # Boolean
self.add_query_param('CreateWithArmsIntegration', CreateWithArmsIntegration)
def get_KeyPair(self): # String
return self.get_query_params().get('KeyPair')
def set_KeyPair(self, KeyPair): # String
self.add_query_param('KeyPair', KeyPair)
def get_ClusterTitle(self): # String
return self.get_query_params().get('ClusterTitle')
def set_ClusterTitle(self, ClusterTitle): # String
self.add_query_param('ClusterTitle', ClusterTitle)
def get_PodCIDR(self): # String
return self.get_query_params().get('PodCIDR')
def set_PodCIDR(self, PodCIDR): # String
self.add_query_param('PodCIDR', PodCIDR)
def get_ClusterId(self): # Long
return self.get_query_params().get('ClusterId')
def set_ClusterId(self, ClusterId): # Long
self.add_query_param('ClusterId', ClusterId)
def get_ClusterType(self): # String
return self.get_query_params().get('ClusterType')
def set_ClusterType(self, ClusterType): # String
self.add_query_param('ClusterType', ClusterType)
def get_Password(self): # String
return self.get_query_params().get('Password')
def set_Password(self, Password): # String
self.add_query_param('Password', Password)
def get_SnatEntry(self): # Integer
return self.get_query_params().get('SnatEntry')
def set_SnatEntry(self, SnatEntry): # Integer
self.add_query_param('SnatEntry', SnatEntry)
def get_NetPlug(self): # String
return self.get_query_params().get('NetPlug')
def set_NetPlug(self, NetPlug): # String
self.add_query_param('NetPlug', NetPlug)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_RegionName(self): # String
return self.get_query_params().get('RegionName')
def set_RegionName(self, RegionName): # String
self.add_query_param('RegionName', RegionName)
def get_PrivateZone(self): # Boolean
return self.get_query_params().get('PrivateZone')
def set_PrivateZone(self, PrivateZone): # Boolean
self.add_query_param('PrivateZone', PrivateZone)
def get_ServiceCIDR(self): # String
return self.get_query_params().get('ServiceCIDR')
def set_ServiceCIDR(self, ServiceCIDR): # String
self.add_query_param('ServiceCIDR', ServiceCIDR)
def get_PublicSlb(self): # Integer
return self.get_query_params().get('PublicSlb')
def set_PublicSlb(self, PublicSlb): # Integer
self.add_query_param('PublicSlb', PublicSlb) | null |
1,151 | """This module contains utilities for longitudinal pipelines. See CAPS specifications for details about long ID."""
from pathlib import Path
from typing import List, Optional
def get_long_id(session_ids: List[str]) -> str:
"""Extract longitudinal ID from a list of session IDs.
This will create a unique identifier for a participant and
its corresponding sessions. Sessions labels are sorted alphabetically
before being merged in order to generate the longitudinal ID.
Parameters
----------
session_ids : list of str
List of session IDs (e.g. ["ses-M000"] or ["ses-M000", "ses-M018", "ses-M036"]).
Returns
-------
str :
Longitudinal ID.
Examples
--------
>>> from clinica.utils.longitudinal import get_long_id
>>> get_long_id(['ses-M000'])
'long-M000'
>>> get_long_id(['ses-M000', 'ses-M018', 'ses-M036'])
'long-M000M018M036'
>>> get_long_id(['ses-M018', 'ses-M036', 'ses-M000'])
'long-M000M018M036'
"""
if not all([session_id.startswith("ses-") for session_id in session_ids]):
raise ValueError(
"Expected a list of session IDs of the form ses-XXX, "
f"but received {session_ids} instead."
)
return "long-" + "".join(
[session_id.lstrip("ses-") for session_id in sorted(session_ids)]
)
def get_participants_long_id(
participant_ids: List[str], session_ids: List[str]
) -> List[str]:
"""Extract list of longitudinal IDs from a set of participant and session IDs.
Parameters
----------
participant_ids : list of str
List of participant IDs for which to compute the longitudinal IDs.
session_ids : list of str
List of session IDs for which to compute the longitudinal IDs.
Returns
-------
list of str :
The computed longitudinal IDs.
Examples
--------
>>> from clinica.utils.longitudinal import get_participants_long_id
>>> get_participants_long_id(['sub-CLNC01', 'sub-CLNC01', 'sub-CLNC02'], ['ses-M000', 'ses-M018', 'ses-M000'])
['long-M000M018', 'long-M000M018', 'long-M000']
"""
from .participant import get_unique_subjects
_, sessions_for_each_subject = get_unique_subjects(participant_ids, session_ids)
long_ids = []
for sessions in sessions_for_each_subject:
long_ids += [get_long_id(sessions)] * len(sessions)
return long_ids
def save_long_id(
session_ids: List[str],
output_dir: Path,
file_name: Optional[str] = None,
) -> None:
"""Save the list of session IDs to given `file_name`.
Parameters
----------
session_ids : list of str
The list of session IDs to save.
output_dir : Path
The path to the output directory in which to save the session IDs.
file_name : str, optional
The file name to use. If None, this will be computed as:
'{longitudinal_id}_sessions.tsv'
"""
if not output_dir.exists():
output_dir.mkdir(parents=True)
file_name = file_name or f"{get_long_id(session_ids)}_sessions.tsv"
content = "\n".join(sorted(session_ids))
with open(output_dir / file_name, "w") as fp:
fp.write(f"session_id\n{content}\n")
def METHOD_NAME(
caps_dir: Path,
participant_id: str,
longitudinal_id: str,
) -> List[str]:
"""Extract sessions IDs from `caps_dir`/subjects/`participant_id`/`long_id`/`long_id`_sessions.tsv.
Parameters
----------
caps_dir : Path
Path to CAPS folder.
participant_id : str
ID of subject for which to extract session IDs.
longitudinal_id : str
Longitudinal ID for which to extract session IDs.
Returns
-------
List of str :
The extracted list of session IDs.
Raises
------
ClinicaException :
If expected session TSV file does not exist.
If 'session_id' is not in the session dataframe.
"""
import pandas as pd
from clinica.utils.exceptions import ClinicaException
sessions_file = (
caps_dir
/ "subjects"
/ participant_id
/ longitudinal_id
/ f"{longitudinal_id}_sessions.tsv"
)
if not sessions_file.is_file():
raise ClinicaException(
"The TSV file with sessions associated "
f"to {participant_id} for longitudinal ID {longitudinal_id} is missing "
f"(expected path: {sessions_file})."
)
df = pd.read_csv(sessions_file, sep="\t")
if "session_id" not in df.columns:
raise ClinicaException(
f"The TSV file does not contain session_id column (path: {sessions_file})."
)
return list(df.session_id) | null |
1,152 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class ModifyPrepayInstanceSpecRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'ModifyPrepayInstanceSpec','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_OperatorType(self): # String
return self.get_query_params().get('OperatorType')
def set_OperatorType(self, OperatorType): # String
self.add_query_param('OperatorType', OperatorType)
def get_SystemDiskCategory(self): # String
return self.get_query_params().get('SystemDisk.Category')
def set_SystemDiskCategory(self, SystemDiskCategory): # String
self.add_query_param('SystemDisk.Category', SystemDiskCategory)
def get_RebootTime(self): # String
return self.get_query_params().get('RebootTime')
def set_RebootTime(self, RebootTime): # String
self.add_query_param('RebootTime', RebootTime)
def get_MigrateAcrossZone(self): # Boolean
return self.get_query_params().get('MigrateAcrossZone')
def set_MigrateAcrossZone(self, MigrateAcrossZone): # Boolean
self.add_query_param('MigrateAcrossZone', MigrateAcrossZone)
def get_InstanceType(self): # String
return self.get_query_params().get('InstanceType')
def set_InstanceType(self, InstanceType): # String
self.add_query_param('InstanceType', InstanceType)
def get_ModifyMode(self): # String
return self.get_query_params().get('ModifyMode')
def set_ModifyMode(self, ModifyMode): # String
self.add_query_param('ModifyMode', ModifyMode)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_RebootWhenFinished(self): # Boolean
return self.get_query_params().get('RebootWhenFinished')
def set_RebootWhenFinished(self, RebootWhenFinished): # Boolean
self.add_query_param('RebootWhenFinished', RebootWhenFinished)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def METHOD_NAME(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_EndTime(self): # String
return self.get_query_params().get('EndTime')
def set_EndTime(self, EndTime): # String
self.add_query_param('EndTime', EndTime)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_Disks(self): # RepeatList
return self.get_query_params().get('Disk')
def set_Disks(self, Disk): # RepeatList
for depth1 in range(len(Disk)):
if Disk[depth1].get('PerformanceLevel') is not None:
self.add_query_param('Disk.' + str(depth1 + 1) + '.PerformanceLevel', Disk[depth1].get('PerformanceLevel'))
if Disk[depth1].get('DiskId') is not None:
self.add_query_param('Disk.' + str(depth1 + 1) + '.DiskId', Disk[depth1].get('DiskId'))
if Disk[depth1].get('Category') is not None:
self.add_query_param('Disk.' + str(depth1 + 1) + '.Category', Disk[depth1].get('Category'))
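    # Editor's note: each dict in Disk expands to indexed keys, e.g.
    # [{'DiskId': 'd-1', 'Category': 'cloud_essd'}] becomes
    # Disk.1.DiskId=d-1 and Disk.1.Category=cloud_essd (hypothetical values).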
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId) | null |
1,153 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkunimkt.endpoint import endpoint_data
class ListMediaNameRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'UniMkt', '2018-12-12', 'ListMediaName')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_UserId(self): # String
return self.get_query_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_query_param('UserId', UserId)
def get_OriginSiteUserId(self): # String
return self.get_query_params().get('OriginSiteUserId')
def set_OriginSiteUserId(self, OriginSiteUserId): # String
self.add_query_param('OriginSiteUserId', OriginSiteUserId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_MediaName(self): # String
return self.get_query_params().get('MediaName')
def set_MediaName(self, MediaName): # String
self.add_query_param('MediaName', MediaName)
def get_AppName(self): # String
return self.get_query_params().get('AppName')
def set_AppName(self, AppName): # String
self.add_query_param('AppName', AppName)
def get_TenantId(self): # String
return self.get_query_params().get('TenantId')
def set_TenantId(self, TenantId): # String
self.add_query_param('TenantId', TenantId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_AccessStatus(self): # String
return self.get_query_params().get('AccessStatus')
def set_AccessStatus(self, AccessStatus): # String
self.add_query_param('AccessStatus', AccessStatus)
def get_FirstScene(self): # String
return self.get_query_params().get('FirstScene')
def set_FirstScene(self, FirstScene): # String
self.add_query_param('FirstScene', FirstScene)
def get_EndCreateTime(self): # Long
return self.get_query_params().get('EndCreateTime')
def set_EndCreateTime(self, EndCreateTime): # Long
self.add_query_param('EndCreateTime', EndCreateTime)
def get_Business(self): # String
return self.get_query_params().get('Business')
def set_Business(self, Business): # String
self.add_query_param('Business', Business)
def get_Os(self): # String
return self.get_query_params().get('Os')
def set_Os(self, Os): # String
self.add_query_param('Os', Os)
def get_MediaStatus(self): # String
return self.get_query_params().get('MediaStatus')
def set_MediaStatus(self, MediaStatus): # String
self.add_query_param('MediaStatus', MediaStatus)
def get_Environment(self): # String
return self.get_query_params().get('Environment')
def set_Environment(self, Environment): # String
self.add_query_param('Environment', Environment)
def get_StartCreateTime(self): # Long
return self.get_query_params().get('StartCreateTime')
def set_StartCreateTime(self, StartCreateTime): # Long
self.add_query_param('StartCreateTime', StartCreateTime)
def get_UserSite(self): # String
return self.get_query_params().get('UserSite')
def set_UserSite(self, UserSite): # String
self.add_query_param('UserSite', UserSite)
def get_SecondScene(self): # String
return self.get_query_params().get('SecondScene')
def set_SecondScene(self, SecondScene): # String
self.add_query_param('SecondScene', SecondScene)
def METHOD_NAME(self): # String
return self.get_query_params().get('MediaType')
def set_MediaType(self, MediaType): # String
self.add_query_param('MediaType', MediaType) | null |
1,154 | from __future__ import annotations
from itertools import product
import pandas as pd
import pytest
import dask.dataframe as dd
from dask.dataframe.utils import assert_eq
def resample(df, freq, how="mean", **kwargs):
return getattr(df.resample(freq, **kwargs), how)()
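# Editor's note: this getattr dispatch makes resample(df, "h", how="sum")
# equivalent to df.resample("h").sum(), letting the parametrized tests
# below vary the aggregation by name.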
@pytest.mark.parametrize(
["obj", "method", "npartitions", "freq", "closed", "label"],
list(
product(
["series", "frame"],
["count", "mean", "ohlc"],
[2, 5],
["30T", "h", "d", "w", "M"],
["right", "left"],
["right", "left"],
)
),
)
def test_series_resample(obj, method, npartitions, freq, closed, label):
index = pd.date_range("1-1-2000", "2-15-2000", freq="h")
index = index.union(pd.date_range("4-15-2000", "5-15-2000", freq="h"))
if obj == "series":
ps = pd.Series(range(len(index)), index=index)
elif obj == "frame":
ps = pd.DataFrame({"a": range(len(index))}, index=index)
ds = dd.from_pandas(ps, npartitions=npartitions)
# Series output
result = resample(ds, freq, how=method, closed=closed, label=label)
expected = resample(ps, freq, how=method, closed=closed, label=label)
assert_eq(result, expected, check_dtype=False)
divisions = result.divisions
assert expected.index[0] == divisions[0]
assert expected.index[-1] == divisions[-1]
@pytest.mark.parametrize("method", ["count", "nunique", "size", "sum"])
def test_resample_has_correct_fill_value(method):
index = pd.date_range("2000-01-01", "2000-02-15", freq="h")
index = index.union(pd.date_range("4-15-2000", "5-15-2000", freq="h"))
ps = pd.Series(range(len(index)), index=index)
ds = dd.from_pandas(ps, npartitions=2)
assert_eq(
getattr(ds.resample("30min"), method)(), getattr(ps.resample("30min"), method)()
)
def test_resample_agg():
index = pd.date_range("2000-01-01", "2000-02-15", freq="h")
ps = pd.Series(range(len(index)), index=index)
ds = dd.from_pandas(ps, npartitions=2)
assert_eq(ds.resample("10min").agg("mean"), ps.resample("10min").agg("mean"))
assert_eq(
ds.resample("10min").agg(["mean", "min"]),
ps.resample("10min").agg(["mean", "min"]),
)
def test_resample_agg_passes_kwargs():
index = pd.date_range("2000-01-01", "2000-02-15", freq="h")
ps = pd.Series(range(len(index)), index=index)
ds = dd.from_pandas(ps, npartitions=2)
def foo(series, bar=1, *args, **kwargs):
return bar
assert_eq(ds.resample("2h").agg(foo, bar=2), ps.resample("2h").agg(foo, bar=2))
assert (ds.resample("2h").agg(foo, bar=2) == 2).compute().all()
def test_resample_throws_error_when_parition_index_does_not_match_index():
index = pd.date_range("1-1-2000", "2-15-2000", freq="D")
index = index.union(pd.date_range("4-15-2000", "5-15-2000", freq="D"))
ps = pd.Series(range(len(index)), index=index)
ds = dd.from_pandas(ps, npartitions=5)
with pytest.raises(ValueError, match="Index is not contained within new index."):
ds.resample("2M").count().compute()
def test_resample_pads_last_division_to_avoid_off_by_one():
# https://github.com/dask/dask/issues/6230
times = [
1545362463409128000,
1545362504369352000,
1545362545326966000,
1545363118769636000,
1545363159726490000,
1545363200687178000,
1545363241648824000,
1573318190393973000,
1573318231353350000,
1573318272313774000,
1573318313275299000,
1573318354233962000,
1573318395195456000,
1573318436154609000,
1580687544437145000,
1580687585394881000,
1580687667316809000,
1580687708275414000,
1580687790195742000,
1580687831154951000,
1580687872115363000,
1580687954035133000,
1559127673402811000,
]
df = pd.DataFrame({"Time": times, "Counts": range(len(times))})
df["Time"] = pd.to_datetime(df["Time"], utc=True)
expected = df.set_index("Time").resample("1Q").size()
ddf = dd.from_pandas(df, npartitions=2).set_index("Time")
actual = ddf.resample("1Q").size().compute()
assert_eq(actual, expected)
def test_resample_does_not_evenly_divide_day():
import numpy as np
index = pd.date_range("2012-01-02", "2012-02-02", freq="H")
index = index.union(pd.date_range("2012-03-02", "2012-04-02", freq="H"))
df = pd.DataFrame({"p": np.random.random(len(index))}, index=index)
ddf = dd.from_pandas(df, npartitions=5)
# Frequency doesn't evenly divide day
expected = df.resample("2D").count()
result = ddf.resample("2D").count().compute()
assert_eq(result, expected)
def test_series_resample_does_not_evenly_divide_day():
index = pd.date_range("2012-01-02 00:00:00", "2012-01-02 01:00:00", freq="T")
index = index.union(
pd.date_range("2012-01-02 06:00:00", "2012-01-02 08:00:00", freq="T")
)
s = pd.Series(range(len(index)), index=index)
ds = dd.from_pandas(s, npartitions=5)
# Frequency doesn't evenly divide day
expected = s.resample("57T").mean()
result = ds.resample("57T").mean().compute()
assert_eq(result, expected)
def test_unknown_divisions_error():
df = pd.DataFrame({"x": [1, 2, 3]})
ddf = dd.from_pandas(df, npartitions=2, sort=False)
try:
ddf.x.resample("1m").mean()
assert False
except ValueError as e:
assert "divisions" in str(e)
def METHOD_NAME():
from datetime import datetime, timedelta
import numpy as np
date_today = datetime.now()
days = pd.date_range(date_today, date_today + timedelta(20), freq="D")
data = np.random.randint(1, high=100, size=len(days))
df = pd.DataFrame({"date": days, "values": data})
df = df.set_index("date")
ddf = dd.from_pandas(df, npartitions=4)
assert ddf.resample("D").mean().head().index.name == "date"
def test_series_resample_non_existent_datetime():
index = [
pd.Timestamp("2016-10-15 00:00:00"),
pd.Timestamp("2016-10-16 10:00:00"),
pd.Timestamp("2016-10-17 00:00:00"),
]
df = pd.DataFrame([[1], [2], [3]], index=index)
df.index = df.index.tz_localize("America/Sao_Paulo")
ddf = dd.from_pandas(df, npartitions=1)
result = ddf.resample("1D").mean()
expected = df.resample("1D").mean()
assert_eq(result, expected, check_freq=False)
@pytest.mark.parametrize("agg", ["nunique", "mean", "count", "size", "quantile"])
def test_common_aggs(agg):
index = pd.date_range("2000-01-01", "2000-02-15", freq="h")
ps = pd.Series(range(len(index)), index=index)
ds = dd.from_pandas(ps, npartitions=2)
f = lambda df: getattr(df, agg)()
res = f(ps.resample("1d"))
expected = f(ds.resample("1d"))
assert_eq(res, expected, check_dtype=False) | null |
1,155 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class ApplyPhysicalConnectionLOARequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'ApplyPhysicalConnectionLOA','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def METHOD_NAME(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_LineType(self): # String
return self.get_query_params().get('LineType')
def set_LineType(self, LineType): # String
self.add_query_param('LineType', LineType)
def get_Si(self): # String
return self.get_query_params().get('Si')
def set_Si(self, Si): # String
self.add_query_param('Si', Si)
def get_PeerLocation(self): # String
return self.get_query_params().get('PeerLocation')
def set_PeerLocation(self, PeerLocation): # String
self.add_query_param('PeerLocation', PeerLocation)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_Bandwidth(self): # Integer
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self, Bandwidth): # Integer
self.add_query_param('Bandwidth', Bandwidth)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_ConstructionTime(self): # String
return self.get_query_params().get('ConstructionTime')
def set_ConstructionTime(self, ConstructionTime): # String
self.add_query_param('ConstructionTime', ConstructionTime)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_CompanyName(self): # String
return self.get_query_params().get('CompanyName')
def set_CompanyName(self, CompanyName): # String
self.add_query_param('CompanyName', CompanyName)
def get_PMInfos(self): # RepeatList
return self.get_query_params().get('PMInfo')
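    # Editor's note: PMInfo is a RepeatList of dicts; each entry expands
    # below into indexed keys such as PMInfo.1.PMName (values are
    # caller-supplied; shown here only to document the flattening).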
def set_PMInfos(self, PMInfo): # RepeatList
for depth1 in range(len(PMInfo)):
if PMInfo[depth1].get('PMCertificateNo') is not None:
self.add_query_param('PMInfo.' + str(depth1 + 1) + '.PMCertificateNo', PMInfo[depth1].get('PMCertificateNo'))
if PMInfo[depth1].get('PMName') is not None:
self.add_query_param('PMInfo.' + str(depth1 + 1) + '.PMName', PMInfo[depth1].get('PMName'))
if PMInfo[depth1].get('PMCertificateType') is not None:
self.add_query_param('PMInfo.' + str(depth1 + 1) + '.PMCertificateType', PMInfo[depth1].get('PMCertificateType'))
if PMInfo[depth1].get('PMGender') is not None:
self.add_query_param('PMInfo.' + str(depth1 + 1) + '.PMGender', PMInfo[depth1].get('PMGender'))
if PMInfo[depth1].get('PMContactInfo') is not None:
self.add_query_param('PMInfo.' + str(depth1 + 1) + '.PMContactInfo', PMInfo[depth1].get('PMContactInfo')) | null |
1,156 | import re
from django.apps import apps
from rest_framework.exceptions import NotFound, PermissionDenied
from rest_framework import generics, permissions as drf_permissions
from framework.auth.oauth_scopes import CoreScopes
from api.addons.serializers import AddonSerializer
from api.base.filters import ListFilterMixin
from api.base.pagination import MaxSizePagination
from api.base.permissions import TokenHasScope
from api.base.settings import ADDONS_OAUTH
from api.base.views import JSONAPIBaseView
from website import settings as osf_settings
class AddonSettingsMixin(object):
"""Mixin with convenience method for retrieving the current <Addon><Node|User>Settings based on the
current URL. By default, fetches the settings based on the user or node available in self context.
"""
def get_addon_settings(self, provider=None, fail_if_absent=True, check_object_permissions=True):
owner = None
provider = provider or self.kwargs['provider']
if hasattr(self, 'get_user'):
owner = self.get_user()
owner_type = 'user'
elif hasattr(self, 'get_node'):
owner = self.get_node()
owner_type = 'node'
try:
addon_module = apps.get_app_config('addons_{}'.format(provider))
except LookupError:
raise NotFound('Requested addon unrecognized')
if not owner or provider not in ADDONS_OAUTH or owner_type not in addon_module.owners:
raise NotFound('Requested addon unavailable')
addon_settings = owner.get_addon(provider)
if not addon_settings and fail_if_absent:
raise NotFound('Requested addon not enabled')
if not addon_settings or addon_settings.deleted:
return None
if addon_settings and check_object_permissions:
authorizer = None
if owner_type == 'user':
authorizer = addon_settings.owner
elif getattr(addon_settings, 'user_settings', None):
authorizer = addon_settings.user_settings.owner
if authorizer and authorizer != self.request.user:
raise PermissionDenied('Must be addon authorizer to list folders')
return addon_settings
class AddonList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/addons_list).
"""
permission_classes = (
drf_permissions.AllowAny,
drf_permissions.IsAuthenticatedOrReadOnly,
TokenHasScope, )
required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
required_write_scopes = [CoreScopes.NULL]
pagination_class = MaxSizePagination
serializer_class = AddonSerializer
view_category = 'addons'
view_name = 'addon-list'
ordering = ()
def METHOD_NAME(self):
return [conf for conf in osf_settings.ADDONS_AVAILABLE_DICT.values() if 'accounts' in conf.configs]
def get_queryset(self):
return self.get_queryset_from_request()
def param_queryset(self, query_params, default_queryset):
"""filters default queryset based on query parameters"""
filters = self.parse_query_params(query_params)
queryset = set(default_queryset)
if filters:
for key, field_names in filters.items():
match = self.QUERY_PATTERN.match(key)
fields = match.groupdict()['fields']
statement = len(re.findall(self.FILTER_FIELDS, fields)) > 1 # This indicates an OR statement
sub_query = set() if statement else set(default_queryset)
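                # Editor's note: OR starts from the empty set and unions
                # matches in; AND starts from the full queryset and
                # intersects matches away.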
for field_name, data in field_names.items():
operations = data if isinstance(data, list) else [data]
for operation in operations:
if statement:
sub_query = sub_query.union(set(self.get_filtered_queryset(field_name, operation, list(default_queryset))))
else:
sub_query = sub_query.intersection(set(self.get_filtered_queryset(field_name, operation, list(default_queryset))))
queryset = sub_query.intersection(queryset)
return list(queryset) | null |
1,157 | # Test methods with long descriptive names can omit docstrings
# pylint: disable=missing-docstring
import io
import os
import pickle
import shutil
import tempfile
import unittest
import warnings
from unittest.mock import Mock, patch
from Orange import data
from Orange.data.io import FileFormat, TabReader, CSVReader, PickleReader
from Orange.data.io_base import PICKLE_PROTOCOL
from Orange.data.table import get_sample_datasets_dir
from Orange.data import Table
from Orange.tests import test_dirname
from Orange.util import OrangeDeprecationWarning
class WildcardReader(FileFormat):
EXTENSIONS = ('.wild', '.wild[0-9]')
DESCRIPTION = "Dummy reader for testing extensions"
def read(self):
pass
class TestChooseReader(unittest.TestCase):
def test_usual_extensions(self):
self.assertIsInstance(FileFormat.get_reader("t.tab"), TabReader)
self.assertIsInstance(FileFormat.get_reader("t.csv"), CSVReader)
self.assertIsInstance(FileFormat.get_reader("t.pkl"), PickleReader)
with self.assertRaises(OSError):
FileFormat.get_reader("test.undefined_extension")
def test_wildcard_extension(self):
self.assertIsInstance(FileFormat.get_reader("t.wild"),
WildcardReader)
self.assertIsInstance(FileFormat.get_reader("t.wild2"),
WildcardReader)
with self.assertRaises(OSError):
FileFormat.get_reader("t.wild2a")
class SameExtension(FileFormat):
PRIORITY = 100
EXTENSIONS = ('.same_extension',)
DESCRIPTION = "Same extension, different priority"
def read(self):
pass
class SameExtensionPreferred(SameExtension):
PRIORITY = 90
class SameExtensionL(SameExtension):
PRIORITY = 110
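# Editor's note: FileFormat.get_reader picks the reader with the lowest
# PRIORITY value among those claiming an extension, which is why the
# test below expects SameExtensionPreferred (priority 90).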
class TestMultipleSameExtension(unittest.TestCase):
def test_find_reader(self):
reader = FileFormat.get_reader("some.same_extension")
self.assertIsInstance(reader, SameExtensionPreferred)
class TestLocate(unittest.TestCase):
def test_locate_sample_datasets(self):
with self.assertRaises(OSError):
FileFormat.locate("iris.tab",
search_dirs=[os.path.dirname(__file__)])
iris = FileFormat.locate("iris.tab",
search_dirs=[get_sample_datasets_dir()])
self.assertEqual(os.path.basename(iris), "iris.tab")
# test extension adding
iris = FileFormat.locate("iris",
search_dirs=[get_sample_datasets_dir()])
self.assertEqual(os.path.basename(iris), "iris.tab")
def test_locate_wildcard_extension(self):
tempdir = tempfile.mkdtemp()
with self.assertRaises(OSError):
FileFormat.locate("t.wild9", search_dirs=[tempdir])
fn = os.path.join(tempdir, "t.wild8")
with open(fn, "wt") as f:
f.write("\n")
location = FileFormat.locate("t.wild8", search_dirs=[tempdir])
self.assertEqual(location, fn)
# test extension adding
location = FileFormat.locate("t", search_dirs=[tempdir])
self.assertEqual(location, fn)
shutil.rmtree(tempdir)
class TestReader(unittest.TestCase):
def setUp(self):
data.table.dataset_dirs.append(test_dirname())
def METHOD_NAME(self):
data.table.dataset_dirs.remove(test_dirname())
def test_open_bad_pickle(self):
"""
        Raise TypeError when PickleReader reads a pickle
        file that does not contain a table (when one is supposed to be there).
GH-2232
"""
reader = PickleReader("")
with unittest.mock.patch("pickle.load", return_value=None):
self.assertRaises(TypeError, reader.read, "foo")
def test_empty_columns(self):
"""Can't read files with more columns then headers. GH-1417"""
samplefile = """\
a, b
1, 0,
1, 2,
"""
c = io.StringIO(samplefile)
with self.assertWarns(UserWarning) as cm:
table = CSVReader(c).read()
self.assertEqual(len(table.domain.attributes), 2)
self.assertEqual(cm.warning.args[0],
"Columns with no headers were removed.")
def test_type_annotations(self):
class FooFormat(FileFormat):
write_file = Mock()
FooFormat.write('test_file', None)
FooFormat.write_file.assert_called_with('test_file', None)
FooFormat.OPTIONAL_TYPE_ANNOTATIONS = True
FooFormat.write('test_file', None)
FooFormat.write_file.assert_called_with('test_file', None, True)
FooFormat.write('test_file', None, False)
FooFormat.write_file.assert_called_with('test_file', None, False)
FooFormat.OPTIONAL_TYPE_ANNOTATIONS = False
FooFormat.write('test_file', None)
FooFormat.write_file.assert_called_with('test_file', None)
@patch('csv.DictWriter.writerow')
def test_header_call(self, writer):
CSVReader.write_headers(writer, Table("iris"), False)
self.assertEqual(len(writer.call_args_list), 1)
writer.reset_mock()
CSVReader.write_headers(writer, Table("iris"), True)
self.assertEqual(len(writer.call_args_list), 3)
def test_load_pickle(self):
"""
        This function tests whether files pickled with older versions of
        Orange load correctly in newer versions of Orange.
"""
with warnings.catch_warnings():
            # In unittests on Travis/GitHub Actions OrangeDeprecationWarning
            # is raised as an error. This statement disables that behaviour
            # only for this test: when unpickling a pickle created with a
            # version older than 3.27, the deprecated `ordered` parameter of
            # DiscreteVariable still appears, which would raise the warning.
warnings.simplefilter('default', OrangeDeprecationWarning)
# load pickles created with Orange 3.20
# in next version there is a change in variables.py - line 738
# which broke back compatibility - tests introduced after the fix
data1 = Table("datasets/sailing-orange-3-20.pkl")
data2 = Table("datasets/sailing-orange-3-20.pkl.gz")
# load pickles created with Orange 3.21
data3 = Table("datasets/sailing-orange-3-21.pkl")
data4 = Table("datasets/sailing-orange-3-21.pkl.gz")
examples_count = 20
self.assertEqual(examples_count, len(data1))
self.assertEqual(examples_count, len(data2))
self.assertEqual(examples_count, len(data3))
self.assertEqual(examples_count, len(data4))
attributes_count = 3
self.assertEqual(attributes_count, len(data1.domain.attributes))
self.assertEqual(attributes_count, len(data2.domain.attributes))
self.assertEqual(attributes_count, len(data3.domain.attributes))
self.assertEqual(attributes_count, len(data4.domain.attributes))
def test_pickle_version(self):
"""
Orange uses a fixed PICKLE_PROTOCOL (currently set to 4)
for pickling data files and possibly elsewhere for consistent
behaviour across different python versions (e.g. 3.6 - 3.8).
When the default protocol is increased in a future version of python
we should consider increasing this constant to match it as well.
"""
# we should use a version that is at least as high as the default.
# it could be higher for older (but supported) python versions
self.assertGreaterEqual(PICKLE_PROTOCOL, pickle.DEFAULT_PROTOCOL)
# we should not use a version that is not supported
self.assertLessEqual(PICKLE_PROTOCOL, pickle.HIGHEST_PROTOCOL)
if __name__ == "__main__":
unittest.main() | null |
1,158 | """ search views"""
import re
from django.contrib.postgres.search import TrigramSimilarity
from django.core.paginator import Paginator
from django.db.models.functions import Greatest
from django.http import JsonResponse
from django.template.response import TemplateResponse
from django.views import View
from csp.decorators import csp_update
from bookwyrm import models
from bookwyrm.connectors import connector_manager
from bookwyrm.book_search import search, format_search_result
from bookwyrm.settings import PAGE_LENGTH
from bookwyrm.utils import regex
from .helpers import is_api_request
from .helpers import handle_remote_webfinger
# pylint: disable= no-self-use
class Search(View):
"""search users or books"""
@csp_update(IMG_SRC="*")
def get(self, request):
"""that search bar up top"""
if is_api_request(request):
return api_book_search(request)
query = request.GET.get("q")
if not query:
return TemplateResponse(request, "search/book.html")
search_type = request.GET.get("type")
if query and not search_type:
search_type = "user" if "@" in query else "book"
endpoints = {
"book": book_search,
"user": METHOD_NAME,
"list": list_search,
}
        if search_type not in endpoints:
search_type = "book"
return endpoints[search_type](request)
def api_book_search(request):
"""Return books via API response"""
query = request.GET.get("q")
query = isbn_check(query)
min_confidence = request.GET.get("min_confidence", 0)
# only return local book results via json so we don't cascade
book_results = search(query, min_confidence=min_confidence)
return JsonResponse(
[format_search_result(r) for r in book_results[:10]], safe=False
)
def book_search(request):
"""the real business is elsewhere"""
query = request.GET.get("q")
# check if query is isbn
query = isbn_check(query)
min_confidence = request.GET.get("min_confidence", 0)
search_remote = request.GET.get("remote", False) and request.user.is_authenticated
# try a local-only search
local_results = search(query, min_confidence=min_confidence)
paginated = Paginator(local_results, PAGE_LENGTH)
page = paginated.get_page(request.GET.get("page"))
data = {
"query": query,
"results": page,
"type": "book",
"remote": search_remote,
"page_range": paginated.get_elided_page_range(
page.number, on_each_side=2, on_ends=1
),
}
# if a logged in user requested remote results or got no local results, try remote
if request.user.is_authenticated and (not local_results or search_remote):
data["remote_results"] = connector_manager.search(
query, min_confidence=min_confidence
)
data["remote"] = True
return TemplateResponse(request, "search/book.html", data)
def METHOD_NAME(request):
"""cool kids members only user search"""
viewer = request.user
query = request.GET.get("q")
query = query.strip()
data = {"type": "user", "query": query}
# logged out viewers can't search users
if not viewer.is_authenticated:
return TemplateResponse(request, "search/user.html", data)
    # use webfinger for a mastodon-style user@domain.com username to load the
    # user if they don't exist locally (handle_remote_webfinger checks the db)
if re.match(regex.FULL_USERNAME, query):
handle_remote_webfinger(query)
results = (
models.User.viewer_aware_objects(viewer)
.annotate(
similarity=Greatest(
TrigramSimilarity("username", query),
TrigramSimilarity("localname", query),
)
)
.filter(
similarity__gt=0.5,
)
.order_by("-similarity")
)
paginated = Paginator(results, PAGE_LENGTH)
page = paginated.get_page(request.GET.get("page"))
data["results"] = page
data["page_range"] = paginated.get_elided_page_range(
page.number, on_each_side=2, on_ends=1
)
return TemplateResponse(request, "search/user.html", data)
def list_search(request):
"""any relevent lists?"""
query = request.GET.get("q")
data = {"query": query, "type": "list"}
results = (
models.List.privacy_filter(
request.user,
privacy_levels=["public", "followers"],
)
.annotate(
similarity=Greatest(
TrigramSimilarity("name", query),
TrigramSimilarity("description", query),
)
)
.filter(
similarity__gt=0.1,
)
.order_by("-similarity")
)
paginated = Paginator(results, PAGE_LENGTH)
page = paginated.get_page(request.GET.get("page"))
data["results"] = page
data["page_range"] = paginated.get_elided_page_range(
page.number, on_each_side=2, on_ends=1
)
return TemplateResponse(request, "search/list.html", data)
def isbn_check(query):
"""isbn10 or isbn13 check, if so remove separators"""
if query:
su_num = re.sub(r"(?<=\d)\D(?=\d|[xX])", "", query)
if len(su_num) == 13 and su_num.isdecimal():
# Multiply every other digit by 3
# Add these numbers and the other digits
product = sum(int(ch) for ch in su_num[::2]) + sum(
int(ch) * 3 for ch in su_num[1::2]
)
if product % 10 == 0:
return su_num
elif (
len(su_num) == 10
and su_num[:-1].isdecimal()
and (su_num[-1].isdecimal() or su_num[-1].lower() == "x")
):
product = 0
            # iterate through the first nine digits
            for i in range(9):
                # multiply each digit by a decreasing weight: 10 - i
                product = product + int(su_num[i]) * (10 - i)
# Handle last character
if su_num[9].lower() == "x":
product += 10
else:
product += int(su_num[9])
if product % 11 == 0:
return su_num
return query | null |
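# Worked example (illustrative, not part of the source): for the ISBN-13
# "978-0-306-40615-7" the separators are stripped to "9780306406157"; the
# even-position digits 9+8+3+6+0+1+7 = 34 plus 3 * (7+0+0+4+6+5) = 66 give
# 100, and 100 % 10 == 0, so the cleaned string is returned. For the ISBN-10
# "0-306-40615-2" the weighted sum 0*10 + 3*9 + 0*8 + 6*7 + 4*6 + 0*5 + 6*4
# + 1*3 + 5*2 = 130 plus the check digit 2 gives 132, and 132 % 11 == 0.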
1,159 | import pandas as pd
from base_test import ArkoudaTest
from context import arkouda as ak
class SeriesTest(ArkoudaTest):
def test_series_creation(self):
# Use positional arguments
ar_tuple = ak.arange(3), ak.arange(3)
s = ak.Series(ar_tuple)
self.assertIsInstance(s, ak.Series)
ar_tuple = ak.array(["A", "B", "C"]), ak.arange(3)
s = ak.Series(ar_tuple)
self.assertIsInstance(s, ak.Series)
# Both data and index are supplied
v = ak.array(["A", "B", "C"])
i = ak.arange(3)
s = ak.Series(data=v, index=i)
self.assertIsInstance(s, ak.Series)
self.assertIsInstance(s.index, ak.Index)
# Just data is supplied
s = ak.Series(data=v)
self.assertIsInstance(s, ak.Series)
self.assertIsInstance(s.index, ak.Index)
# Just index is supplied (keyword argument)
with self.assertRaises(TypeError):
s = ak.Series(index=i)
# Just data is supplied (positional argument)
s = ak.Series(ak.array(["A", "B", "C"]))
self.assertIsInstance(s, ak.Series)
# Just index is supplied (ar_tuple argument)
ar_tuple = (ak.arange(3),)
with self.assertRaises(TypeError):
s = ak.Series(ar_tuple)
# No arguments are supplied
with self.assertRaises(TypeError):
s = ak.Series()
with self.assertRaises(ValueError):
s = ak.Series(data=ak.arange(3), index=ak.arange(6))
def test_lookup(self):
v = ak.array(["A", "B", "C"])
i = ak.arange(3)
s = ak.Series(data=v, index=i)
lk = s.locate(1)
self.assertIsInstance(lk, ak.Series)
self.assertEqual(lk.index[0], 1)
self.assertEqual(lk.values[0], "B")
lk = s.locate([0, 2])
self.assertIsInstance(lk, ak.Series)
self.assertEqual(lk.index[0], 0)
self.assertEqual(lk.values[0], "A")
self.assertEqual(lk.index[1], 2)
self.assertEqual(lk.values[1], "C")
# testing index lookup
i = ak.Index([1])
lk = s.locate(i)
self.assertIsInstance(lk, ak.Series)
self.assertListEqual(lk.index.to_list(), i.index.to_list())
self.assertEqual(lk.values[0], v[1])
i = ak.Index([0, 2])
lk = s.locate(i)
self.assertIsInstance(lk, ak.Series)
self.assertListEqual(lk.index.to_list(), i.index.to_list())
        self.assertEqual(lk.values.to_list(), v[ak.array([0, 2])].to_list())
# testing multi-index lookup
mi = ak.MultiIndex([ak.arange(3), ak.array([2, 1, 0])])
s = ak.Series(data=v, index=mi)
lk = s.locate(mi[0])
self.assertIsInstance(lk, ak.Series)
self.assertListEqual(lk.index.index, mi[0].index)
self.assertEqual(lk.values[0], v[0])
# ensure error with scalar and multi-index
with self.assertRaises(TypeError):
lk = s.locate(0)
with self.assertRaises(TypeError):
            lk = s.locate([0, 2])
def test_shape(self):
v = ak.array(["A", "B", "C"])
i = ak.arange(3)
s = ak.Series(data=v, index=i)
(l,) = s.shape
self.assertEqual(l, 3)
def test_add(self):
ar_tuple = (ak.arange(3), ak.arange(3))
ar_tuple_add = (ak.arange(3, 6, 1), ak.arange(3, 6, 1))
i = ak.arange(3)
v = ak.arange(3, 6, 1)
s = ak.Series(data=i, index=i)
s_add = ak.Series(data=v, index=v)
added = s.add(s_add)
idx_list = added.index.to_pandas().tolist()
val_list = added.values.to_list()
for i in range(6):
self.assertIn(i, idx_list)
self.assertIn(i, val_list)
def METHOD_NAME(self):
v = ak.arange(3)
i = ak.arange(3)
s = ak.Series(data=v, index=i)
top = s.topn(2)
self.assertListEqual(top.index.to_pandas().tolist(), [2, 1])
self.assertListEqual(top.values.to_list(), [2, 1])
def test_sort_idx(self):
v = ak.arange(5)
i = ak.array([3, 1, 4, 0, 2])
s = ak.Series(data=v, index=i)
        sorted_series = s.sort_index()
        self.assertListEqual(sorted_series.index.to_pandas().tolist(), [i for i in range(5)])
        self.assertListEqual(sorted_series.values.to_list(), [3, 1, 4, 0, 2])
def test_sort_value(self):
v = ak.array([3, 1, 4, 0, 2])
i = ak.arange(5)
s = ak.Series(data=v, index=i)
        sorted_series = s.sort_values()
        self.assertListEqual(sorted_series.index.to_pandas().tolist(), [3, 1, 4, 0, 2])
        self.assertListEqual(sorted_series.values.to_list(), [i for i in range(5)])
def test_head_tail(self):
v = ak.arange(5)
i = ak.arange(5)
s = ak.Series(data=v, index=i)
head = s.head(2)
self.assertListEqual(head.index.to_pandas().tolist(), [0, 1])
self.assertListEqual(head.values.to_list(), [0, 1])
tail = s.tail(3)
self.assertListEqual(tail.index.to_pandas().tolist(), [2, 3, 4])
self.assertListEqual(tail.values.to_list(), [2, 3, 4])
def test_value_counts(self):
v = ak.array([0, 0, 1, 2, 2])
i = ak.arange(5)
s = ak.Series(data=v, index=i)
c = s.value_counts()
self.assertListEqual(c.index.to_pandas().tolist(), [0, 2, 1])
self.assertListEqual(c.values.to_list(), [2, 2, 1])
c = s.value_counts(sort=True)
self.assertListEqual(c.index.to_pandas().tolist(), [0, 2, 1])
self.assertListEqual(c.values.to_list(), [2, 2, 1])
def test_concat(self):
v = ak.arange(5)
i = ak.arange(5)
s = ak.Series(data=v, index=i)
v = ak.arange(5, 11, 1)
i = ak.arange(5, 11, 1)
s2 = ak.Series(data=v, index=i)
c = ak.Series.concat([s, s2])
self.assertListEqual(c.index.to_pandas().tolist(), [i for i in range(11)])
self.assertListEqual(c.values.to_list(), [i for i in range(11)])
df = ak.Series.concat([s, s2], axis=1)
self.assertIsInstance(df, ak.DataFrame)
ref_df = pd.DataFrame(
{
"idx": [i for i in range(11)],
"val_0": [0, 1, 2, 3, 4, 0, 0, 0, 0, 0, 0],
"val_1": [0, 0, 0, 0, 0, 5, 6, 7, 8, 9, 10],
}
)
self.assertTrue(((ref_df == df.to_pandas()).all()).all())
def test_pdconcat(self):
v = ak.arange(5)
i = ak.arange(5)
s = ak.Series(data=v, index=i)
v = ak.arange(5, 11, 1)
i = ak.arange(5, 11, 1)
s2 = ak.Series(data=v, index=i)
c = ak.Series.pdconcat([s, s2])
self.assertIsInstance(c, pd.Series)
self.assertListEqual(c.index.tolist(), [i for i in range(11)])
self.assertListEqual(c.values.tolist(), [i for i in range(11)])
v = ak.arange(5, 10, 1)
i = ak.arange(5, 10, 1)
s2 = ak.Series(data=v, index=i)
df = ak.Series.pdconcat([s, s2], axis=1)
self.assertIsInstance(df, pd.DataFrame)
ref_df = pd.DataFrame({0: [0, 1, 2, 3, 4], 1: [5, 6, 7, 8, 9]})
self.assertTrue((ref_df == df).all().all())
def test_index_as_index_compat(self):
# added to validate functionality for issue #1506
df = ak.DataFrame({"a": ak.arange(10), "b": ak.arange(10), "c": ak.arange(10)})
g = df.groupby(["a", "b"])
g.broadcast(g.sum("c")) | null |
1,160 | import os
import datetime
import logging
import json
import re
from typing import Tuple
from azure.core.exceptions import ResourceExistsError
from azure.identity import DefaultAzureCredential
from azure.storage.blob import ContainerSasPermissions, generate_container_sas, BlobServiceClient
from exceptions import NoFilesInRequestException, TooManyFilesInRequestException
def METHOD_NAME(account_name: str) -> str:
return f"https://{account_name}.blob.{get_storage_endpoint_suffix()}/"
def get_blob_client_from_blob_info(storage_account_name: str, container_name: str, blob_name: str):
source_blob_service_client = BlobServiceClient(account_url=METHOD_NAME(storage_account_name),
credential=get_credential())
source_container_client = source_blob_service_client.get_container_client(container_name)
return source_container_client.get_blob_client(blob_name)
def create_container(account_name: str, request_id: str):
try:
container_name = request_id
blob_service_client = BlobServiceClient(account_url=METHOD_NAME(account_name),
credential=get_credential())
blob_service_client.create_container(container_name)
logging.info(f'Container created for request id: {request_id}.')
except ResourceExistsError:
logging.info(f'Did not create a new container. Container already exists for request id: {request_id}.')
def get_request_files(account_name: str, request_id: str) -> list:
files = []
blob_service_client = BlobServiceClient(account_url=METHOD_NAME(account_name), credential=get_credential())
container_client = blob_service_client.get_container_client(container=request_id)
for blob in container_client.list_blobs():
files.append({"name": blob.name, "size": blob.size})
return files
def copy_data(source_account_name: str, destination_account_name: str, request_id: str):
credential = get_credential()
container_name = request_id
source_blob_service_client = BlobServiceClient(account_url=METHOD_NAME(source_account_name),
credential=credential)
source_container_client = source_blob_service_client.get_container_client(container_name)
# Check that we are copying exactly one blob
found_blobs = 0
blob_name = ""
for blob in source_container_client.list_blobs():
blob_name = blob.name
if found_blobs > 0:
msg = "Request with id {} contains more than 1 file. flow aborted.".format(request_id)
logging.error(msg)
raise TooManyFilesInRequestException(msg)
found_blobs += 1
if found_blobs == 0:
msg = "Request with id {} did not contain any files. flow aborted.".format(request_id)
logging.error(msg)
raise NoFilesInRequestException(msg)
udk = source_blob_service_client.get_user_delegation_key(datetime.datetime.utcnow() - datetime.timedelta(hours=1),
datetime.datetime.utcnow() + datetime.timedelta(hours=1))
    # token generation with an expiry of 1 hour; since it's not shared, we can leave it to expire (no need to track/delete it)
# Remove sas token if not needed: https://github.com/microsoft/AzureTRE/issues/2034
sas_token = generate_container_sas(account_name=source_account_name,
container_name=container_name,
user_delegation_key=udk,
permission=ContainerSasPermissions(read=True),
expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1))
source_blob = source_container_client.get_blob_client(blob_name)
source_url = f'{source_blob.url}?{sas_token}'
    # Set metadata to include the blob url the blob was copied from
metadata = source_blob.get_blob_properties()["metadata"]
copied_from = json.loads(metadata["copied_from"]) if "copied_from" in metadata else []
metadata["copied_from"] = json.dumps(copied_from + [source_blob.url])
# Copy files
dest_blob_service_client = BlobServiceClient(account_url=METHOD_NAME(destination_account_name),
credential=credential)
copied_blob = dest_blob_service_client.get_blob_client(container_name, source_blob.blob_name)
copy = copied_blob.start_copy_from_url(source_url, metadata=metadata)
try:
logging.info("Copy operation returned 'copy_id': '%s', 'copy_status': '%s'", copy["copy_id"],
copy["copy_status"])
except KeyError as e:
logging.error(f"Failed getting operation id and status {e}")
def get_credential() -> DefaultAzureCredential:
    managed_identity = os.environ.get("MANAGED_IDENTITY_CLIENT_ID")
    if managed_identity:
        logging.info("using the Airlock processor's managed identity to get credentials.")
        return DefaultAzureCredential(managed_identity_client_id=managed_identity,
                                      exclude_shared_token_cache_credential=True)
    # fall back to the default credential chain when no managed identity is configured
    return DefaultAzureCredential()
def get_blob_info_from_topic_and_subject(topic: str, subject: str):
    # Example of a topic: "/subscriptions/<subscription_id>/resourceGroups/<resource_group_name>/providers/Microsoft.Storage/storageAccounts/<storage_account_name>"
storage_account_name = re.search(r'providers/Microsoft.Storage/storageAccounts/(.*?)$', topic).group(1)
# Example of a subject: "/blobServices/default/containers/<container_guid>/blobs/<blob_name>"
container_name, blob_name = re.search(r'/blobServices/default/containers/(.*?)/blobs/(.*?)$', subject).groups()
return storage_account_name, container_name, blob_name
def get_blob_info_from_blob_url(blob_url: str) -> Tuple[str, str, str]:
# Example of blob url: https://stalimappws663d.blob.core.windows.net/50866a82-d13a-4fd5-936f-deafdf1022ce/test_blob.txt
return re.search(rf'https://(.*?).blob.{get_storage_endpoint_suffix()}/(.*?)/(.*?)$', blob_url).groups()
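# Illustrative example (not part of the source): with the default endpoint
# suffix "core.windows.net", the URL in the comment above parses to
# ("stalimappws663d", "50866a82-d13a-4fd5-936f-deafdf1022ce", "test_blob.txt"),
# i.e. (storage_account_name, container_name, blob_name).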
def get_blob_url(account_name: str, container_name: str, blob_name='') -> str:
return f'{METHOD_NAME(account_name)}{container_name}/{blob_name}'
def get_storage_endpoint_suffix():
default_value = "core.windows.net"
try:
return os.environ["STORAGE_ENDPOINT_SUFFIX"]
except KeyError as e:
logging.warning(f"Missing environment variable: {e}. using default value: '{default_value}'")
return default_value | null |
1,161 | import neopixel
from storage import getmount
from kmk.extensions import Extension
from kmk.handlers.stock import passthrough as handler_passthrough
from kmk.keys import make_key
class Color:
OFF = [0, 0, 0]
BLACK = OFF
WHITE = [249, 249, 249]
RED = [255, 0, 0]
AZURE = [153, 245, 255]
BLUE = [0, 0, 255]
CYAN = [0, 255, 255]
GREEN = [0, 255, 0]
YELLOW = [255, 247, 0]
MAGENTA = [255, 0, 255]
ORANGE = [255, 77, 0]
PURPLE = [255, 0, 242]
TEAL = [0, 128, 128]
PINK = [255, 0, 255]
class Rgb_matrix_data:
def __init__(self, keys=[], underglow=[]):
if len(keys) == 0:
print('No colors passed for your keys')
return
if len(underglow) == 0:
print('No colors passed for your underglow')
return
self.data = keys + underglow
@staticmethod
def generate_led_map(
number_of_keys, number_of_underglow, key_color, underglow_color
):
keys = [key_color] * number_of_keys
underglow = [underglow_color] * number_of_underglow
print(f'Rgb_matrix_data(keys={keys},\nunderglow={underglow})')
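# Usage sketch (illustrative, not from the source): generate_led_map prints a
# ready-to-paste constructor call using the Color constants defined above, e.g.
#   Rgb_matrix_data.generate_led_map(2, 2, Color.WHITE, Color.BLUE)
#   # -> Rgb_matrix_data(keys=[[249, 249, 249], [249, 249, 249]],
#   #                    underglow=[[0, 0, 255], [0, 0, 255]])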
class Rgb_matrix(Extension):
def __init__(
self,
rgb_order=(1, 0, 2), # GRB WS2812
disable_auto_write=False,
ledDisplay=[],
split=False,
rightSide=False,
):
name = str(getmount('/').label)
self.rgb_order = rgb_order
self.disable_auto_write = disable_auto_write
self.split = split
self.rightSide = rightSide
        self.brightness_step = 0.1
        self.brightness = 0
        self.enable = True  # referenced by _rgb_tog; was never initialised, so the first toggle raised AttributeError
if name.endswith('L'):
self.rightSide = False
elif name.endswith('R'):
self.rightSide = True
if type(ledDisplay) is Rgb_matrix_data:
self.ledDisplay = ledDisplay.data
else:
self.ledDisplay = ledDisplay
make_key(
names=('RGB_TOG',), on_press=self._rgb_tog, on_release=handler_passthrough
)
make_key(
names=('RGB_BRI',), on_press=self._rgb_bri, on_release=handler_passthrough
)
make_key(
names=('RGB_BRD',), on_press=self._rgb_brd, on_release=handler_passthrough
)
def _rgb_tog(self, *args, **kwargs):
if self.enable:
self.off()
else:
self.on()
self.enable = not self.enable
def _rgb_bri(self, *args, **kwargs):
self.increase_brightness()
def _rgb_brd(self, *args, **kwargs):
self.decrease_brightness()
def on(self):
if self.neopixel:
self.METHOD_NAME()
self.neopixel.show()
def off(self):
if self.neopixel:
self.set_rgb_fill((0, 0, 0))
def set_rgb_fill(self, rgb):
if self.neopixel:
self.neopixel.fill(rgb)
if self.disable_auto_write:
self.neopixel.show()
def set_brightness(self, brightness=None):
if brightness is None:
brightness = self.brightness
if self.neopixel:
self.neopixel.brightness = brightness
if self.disable_auto_write:
self.neopixel.show()
def increase_brightness(self, step=None):
if step is None:
step = self.brightness_step
self.brightness = (
self.brightness + step if self.brightness + step <= 1.0 else 1.0
)
self.set_brightness(self.brightness)
def decrease_brightness(self, step=None):
if step is None:
step = self.brightness_step
self.brightness = (
self.brightness - step if self.brightness - step >= 0.0 else 0.0
)
self.set_brightness(self.brightness)
def METHOD_NAME(self):
if self.split:
for i, val in enumerate(self.ledDisplay):
if self.rightSide:
if self.keyPos[i] >= (self.num_pixels / 2):
self.neopixel[int(self.keyPos[i] - (self.num_pixels / 2))] = (
val[0],
val[1],
val[2],
)
else:
if self.keyPos[i] <= (self.num_pixels / 2):
self.neopixel[self.keyPos[i]] = (val[0], val[1], val[2])
else:
for i, val in enumerate(self.ledDisplay):
self.neopixel[self.keyPos[i]] = (val[0], val[1], val[2])
def on_runtime_enable(self, sandbox):
return
def on_runtime_disable(self, sandbox):
return
def during_bootup(self, board):
self.neopixel = neopixel.NeoPixel(
board.rgb_pixel_pin,
board.num_pixels,
brightness=board.brightness_limit,
pixel_order=self.rgb_order,
auto_write=not self.disable_auto_write,
)
self.num_pixels = board.num_pixels
self.keyPos = board.led_key_pos
self.brightness = board.brightness_limit
self.on()
return
def before_matrix_scan(self, sandbox):
return
def after_matrix_scan(self, sandbox):
return
def before_hid_send(self, sandbox):
return
def after_hid_send(self, sandbox):
return
def on_powersave_enable(self, sandbox):
if self.neopixel:
self.neopixel.brightness = (
self.neopixel.brightness / 2
if self.neopixel.brightness / 2 > 0
else 0.1
)
if self.disable_auto_write:
self.neopixel.show()
def on_powersave_disable(self, sandbox):
if self.neopixel:
self.neopixel.brightness = self.brightness
if self.disable_auto_write:
self.neopixel.show() | null |
1,162 | #!/usr/bin/env python
"""Build a mulled images for all recent conda recipe updates that don't have existing images.
Examples:
Build mulled images for recent bioconda changes with:
mulled-build-channel build
Build, test, and publish images with the following command:
mulled-build-channel all
See recent changes that would be built with:
mulled-build-channel list
"""
import os
import subprocess
import sys
import time
import requests
from ._cli import arg_parser
from .mulled_build import (
add_build_arguments,
args_to_mull_targets_kwds,
build_target,
conda_versions,
get_affected_packages,
mull_targets,
)
from .util import (
quay_versions,
version_sorted,
)
def _fetch_repo_data(args):
repo_data = args.repo_data
channel = args.channel
if not os.path.exists(repo_data):
platform_tag = "osx-64" if sys.platform == "darwin" else "linux-64"
subprocess.check_call(
[
"wget",
"--quiet",
f"https://conda.anaconda.org/{channel}/{platform_tag}/repodata.json.bz2",
"-O",
f"{repo_data}.bz2",
]
)
subprocess.check_call(["bzip2", "-d", f"{repo_data}.bz2"])
return repo_data
def METHOD_NAME(quay, conda):
"""Calculate the versions that are in conda but not on quay.io."""
sconda = set(conda)
squay = set(quay) if quay else set()
    return sconda - squay  # plain difference: versions in conda that are missing from quay
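# Illustrative example (not part of the source): with conda = ["1.0", "1.1",
# "1.2"] and quay = ["1.0"], the function returns {"1.1", "1.2"} -- only the
# versions that still need a mulled image built and published.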
def run_channel(args, build_last_n_versions=1):
"""Build list of involucro commands (as shell snippet) to run."""
session = requests.session()
for pkg_name, pkg_tests in get_affected_packages(args):
repo_data = _fetch_repo_data(args)
c = conda_versions(pkg_name, repo_data)
# only package the most recent N versions
c = version_sorted(c)[:build_last_n_versions]
if not args.force_rebuild:
time.sleep(1)
q = quay_versions(args.namespace, pkg_name, session)
versions = METHOD_NAME(q, c)
else:
versions = c
for tag in versions:
target = build_target(pkg_name, tag=tag)
targets = [target]
mull_targets(targets, test=pkg_tests, **args_to_mull_targets_kwds(args))
def get_pkg_names(args):
"""Print package names that would be affected."""
print("\n".join(pkg_name for pkg_name, pkg_tests in get_affected_packages(args)))
def add_channel_arguments(parser):
"""Add arguments only used if running mulled over a whole conda channel."""
parser.add_argument(
"--repo-data",
dest="repo_data",
required=True,
help='Published repository data. If you want to build all containers for bioconda, this parameter needs to be set to "bioconda"',
)
parser.add_argument(
"--diff-hours",
dest="diff_hours",
default="25",
help="If finding all recently changed recipes, use this number of hours.",
)
parser.add_argument("--recipes-dir", dest="recipes_dir", default="./bioconda-recipes")
parser.add_argument(
"--force-rebuild", dest="force_rebuild", action="store_true", help="Rebuild package even if already published."
)
def main(argv=None):
"""Main entry-point for the CLI tool."""
parser = arg_parser(argv, globals())
add_channel_arguments(parser)
add_build_arguments(parser)
parser.add_argument("command", metavar="COMMAND", help="Command (list, build-and-test, build, all)")
parser.add_argument(
"--targets", dest="targets", default=None, help="Build a single container with specific package(s)."
)
parser.add_argument(
"--repository-name",
dest="repository_name",
default=None,
help="Name of a single container (leave blank to auto-generate based on packages).",
)
args = parser.parse_args()
if args.command == "list":
get_pkg_names(args)
else:
run_channel(args)
__all__ = ("main",)
if __name__ == "__main__":
main() | null |
1,163 | from lark.lark import Token
from sqlalchemy.sql.operators import in_op, comparison_op
from sqlalchemy.sql import Selectable
from sqlalchemy.orm import Query
from sqlalchemy import Column
from sqlalchemy.orm.session import Session
from shared.database.source_control.file import File
from shared.shared_logger import get_shared_logger
from shared.database.source_control.file_stats import FileStats
from typing import List
import operator
logger = get_shared_logger()
def has_quotes(s: str) -> bool:
return (s.startswith('"') and s.endswith('"')) or (s.startswith("'") and s.endswith("'"))
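# Illustrative examples (not from the source):
#   has_quotes('"cat"')   -> True
#   has_quotes("'cat'")   -> True
#   has_quotes('cat')     -> False
#   has_quotes('"cat\'')  -> False (mismatched quote styles)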
class CompareOperator:
operator_value: operator or comparison_op
@staticmethod
def create_compare_operator_from_token(token: Token) -> 'CompareOperator':
string_operator_mapping = {
'>': operator.gt,
'<': operator.lt,
'=': operator.eq,
'!=': operator.ne,
'>=': operator.ge,
'<=': operator.le,
'in': in_op,
}
value = string_operator_mapping[token.value]
result = CompareOperator(operator_value = value)
return result
def __init__(self, operator_value: operator or comparison_op):
self.operator_value = operator_value
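# Illustrative sketch (not part of the source): the token mapping above
# resolves query-language operators to Python/SQLAlchemy callables, e.g.
#   operator.gt(5, 3)       -> True
#   in_op(File.id, [1, 2])  -> a SQL "file.id IN (1, 2)" clause when used
#                              inside a SQLAlchemy filter()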
class QueryEntity:
key: any # any type, a scaler, or a string reserved word etc.
full_key: any # any type, a scaler, or a string reserved word etc.
parent_key: 'QueryEntity'
kind: str # scaler or reserved
key_has_been_type_corrected: bool
child_list: []
reserved_sub_words: list = ['id', 'tag']
def remove_plural(self):
if type(self.key) == str:
if self.key.endswith('s'):
self.key = self.key[: - 1]
def METHOD_NAME(self, token: Token) -> list:
self.child_list = []
if type(self.key) != str:
return self.child_list
list_items = token.value.split('.')
i = 0
query_entity_list = [self]
for item in reversed(list_items):
ent = QueryEntity()
ent.key = item
if i > 0:
ent.parent_key = query_entity_list[i - 1]
if i < len(list_items) - 1:
query_entity_list.append(ent)
self.child_list.append(ent)
return self.child_list
@staticmethod
def cast_int_from_unknown_type(value: any):
try:
return int(value)
        except (TypeError, ValueError):
            return value
def set_key_from_token_with_unknown_type(self, token: Token):
value = QueryEntity.cast_int_from_unknown_type(token.value)
new_value = None
full_key = None
if type(value) == int:
new_value = value
if type(value) == list:
new_value = value
if type(value) == str:
new_value = value.split('.')[0]
full_key = value
self.key = new_value
self.full_key = full_key
self.key_has_been_type_corrected = True
if type(value) not in [int, str, list]:
raise NotImplementedError
def change_key_based_on_sub_elements(self):
final_key = self.key
if len(self.child_list) > 1:
for i in range(len(self.child_list) - 1, -1, -1):
child = self.child_list[i]
if i == len(self.child_list) - 1:
continue
if child.key in self.reserved_sub_words:
final_key += '_' + child.key
self.key = final_key
@staticmethod
def new(token) -> 'QueryEntity':
entity = QueryEntity()
entity.set_key_from_token_with_unknown_type(token)
entity.remove_plural()
entity.METHOD_NAME(token)
entity.change_key_based_on_sub_elements()
return entity
class QueryElement:
list_value: list
raw_value: any
column: Column or None
subquery: Query
project_id: int
token: Token
top_level_key: None
log: dict
query_entity: QueryEntity
query_entity_children: List[QueryEntity]
reserved_words: List[str] = ['label', 'attribute', 'file', 'dataset_id', 'dataset_tag', 'list']
def build_query(self, session: Session, token: Token) -> Selectable:
raise NotImplementedError
def determine_if_reserved_word(self, word: str):
if word in self.reserved_words:
return True
@staticmethod
def new(session: Session,
log: dict,
project_id: int,
token: Token) -> ['QueryElement', dict]:
"""
Generates a query element from the given entity type.
:param session:
:param log:
:param project_id:
:param entity_type:
:param token:
:return:
"""
from shared.query_engine.sql_alchemy_query_elements.dataset_tag import TagDatasetQueryElement
from shared.query_engine.sql_alchemy_query_elements.file import FileQueryElement
from shared.query_engine.sql_alchemy_query_elements.attribute import AttributeQueryElement
from shared.query_engine.sql_alchemy_query_elements.dataset import DatasetQuery
from shared.query_engine.sql_alchemy_query_elements.scalar import ScalarQueryElement
from shared.query_engine.sql_alchemy_query_elements.labels import LabelsQueryElement
query_element = QueryElement()
entity = QueryEntity.new(token)
query_element.query_entity = entity
is_reserved_word = False
if type(entity.key) == str and not has_quotes(entity.key):
is_reserved_word = query_element.determine_if_reserved_word(entity.key)
if not is_reserved_word:
log['error'][
'is_reserved_word'] = f"Entity: {entity.key} is not valid. Valid options are {query_element.reserved_words}"
return None, log
else:
entity.key = "scalar"
string_query_class = {
'label': LabelsQueryElement,
'attribute': AttributeQueryElement,
'file': FileQueryElement,
'dataset_id': DatasetQuery,
'dataset_tag': TagDatasetQueryElement,
'scalar': ScalarQueryElement
}
QueryClass = string_query_class.get(entity.key)
if QueryClass is None:
raise NotImplementedError
query_class = QueryClass()
query_class.query_entity = entity
query_class.token = token
query_class.project_id = project_id
query_class.is_reserved_word = is_reserved_word
query_class.session = session
query_class.log = log
query_class.build_query(session = session, token = token)
return query_class, log | null |
1,164 | #
# formatter.py
#
# Convert parsed content blocks to a structured document (library file).
#
# Copyright 2002-2018 by
# David Turner.
#
# This file is part of the FreeType project, and may only be used,
# modified, and distributed under the terms of the FreeType project
# license, LICENSE.TXT. By continuing to use, modify, or distribute
# this file you indicate that you have read the license and
# understand and accept it fully.
#
# This is the base Formatter class. Its purpose is to convert a content
# processor's data into specific documents (i.e., table of contents, global
# index, and individual API reference indices).
#
# You need to sub-class it to output anything sensible. For example, the
# file `tohtml.py' contains the definition of the `HtmlFormatter' sub-class
# to output HTML.
#
from sources import *
from content import *
from utils import *
################################################################
##
## FORMATTER CLASS
##
class Formatter:
def __init__( self, processor ):
self.processor = processor
self.identifiers = {}
self.chapters = processor.chapters
self.sections = processor.sections.values()
self.block_index = []
# store all blocks in a dictionary
self.blocks = []
for section in self.sections:
for block in section.blocks.values():
self.add_identifier( block.name, block )
# add enumeration values to the index, since this is useful
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
self.add_identifier( field.name, block )
self.block_index = self.identifiers.keys()
self.block_index.sort( key = index_key )
# also add section names to dictionary (without making them appear
# in the index)
for section in self.sections:
self.add_identifier( section.name, section )
def add_identifier( self, name, block ):
if name in self.identifiers:
# duplicate name!
sys.stderr.write( "WARNING: duplicate definition for"
+ " '" + name + "' "
+ "in " + block.location() + ", "
+ "previous definition in "
+ self.identifiers[name].location()
+ "\n" )
else:
self.identifiers[name] = block
#
# formatting the table of contents
#
def toc_enter( self ):
pass
def toc_chapter_enter( self, chapter ):
pass
def toc_section_enter( self, section ):
pass
def toc_section_exit( self, section ):
pass
def toc_chapter_exit( self, chapter ):
pass
def toc_index( self, index_filename ):
pass
def toc_exit( self ):
pass
def toc_dump( self, toc_filename = None, index_filename = None ):
output = None
if toc_filename:
output = open_output( toc_filename )
self.toc_enter()
for chap in self.processor.chapters:
self.toc_chapter_enter( chap )
for section in chap.sections:
self.toc_section_enter( section )
self.toc_section_exit( section )
self.toc_chapter_exit( chap )
self.toc_index( index_filename )
self.toc_exit()
if output:
close_output( output )
#
# formatting the index
#
def index_enter( self ):
pass
def index_name_enter( self, name ):
pass
def index_name_exit( self, name ):
pass
def index_exit( self ):
pass
def index_dump( self, index_filename = None ):
output = None
if index_filename:
output = open_output( index_filename )
self.index_enter()
for name in self.block_index:
self.index_name_enter( name )
self.index_name_exit( name )
self.index_exit()
if output:
close_output( output )
#
# formatting a section
#
def section_enter( self, section ):
pass
def METHOD_NAME( self, block ):
pass
def markup_enter( self, markup, block = None ):
pass
def field_enter( self, field, markup = None, block = None ):
pass
def field_exit( self, field, markup = None, block = None ):
pass
def markup_exit( self, markup, block = None ):
pass
def block_exit( self, block ):
pass
def section_exit( self, section ):
pass
def section_dump( self, section, section_filename = None ):
output = None
if section_filename:
output = open_output( section_filename )
self.section_enter( section )
for name in section.block_names:
skip_entry = 0
try:
block = self.identifiers[name]
# `block_names' can contain field names also,
# which we filter out
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
if field.name == name:
skip_entry = 1
except:
skip_entry = 1 # this happens e.g. for `/empty/' entries
if skip_entry:
continue
self.METHOD_NAME( block )
for markup in block.markups[1:]: # always ignore first markup!
self.markup_enter( markup, block )
for field in markup.fields:
self.field_enter( field, markup, block )
self.field_exit( field, markup, block )
self.markup_exit( markup, block )
self.block_exit( block )
self.section_exit( section )
if output:
close_output( output )
def section_dump_all( self ):
for section in self.sections:
self.section_dump( section )
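# Minimal subclass sketch (illustrative only, not part of docmaker): as the
# header notes, Formatter must be sub-classed to produce real output. A
# bare-bones text formatter might override just a few hooks, e.g.:
#
#   class TextFormatter( Formatter ):
#       def toc_section_enter( self, section ):
#           print( "section: " + section.name )
#       def index_name_enter( self, name ):
#           print( "index entry: " + name )
#
# Real sub-classes such as HtmlFormatter in `tohtml.py' implement all hooks.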
# eof | null |
1,165 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeLaunchTemplateVersionsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeLaunchTemplateVersions','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_LaunchTemplateName(self): # String
return self.get_query_params().get('LaunchTemplateName')
def set_LaunchTemplateName(self, LaunchTemplateName): # String
self.add_query_param('LaunchTemplateName', LaunchTemplateName)
def get_MaxVersion(self): # Long
return self.get_query_params().get('MaxVersion')
def set_MaxVersion(self, MaxVersion): # Long
self.add_query_param('MaxVersion', MaxVersion)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DefaultVersion(self): # Boolean
return self.get_query_params().get('DefaultVersion')
def set_DefaultVersion(self, DefaultVersion): # Boolean
self.add_query_param('DefaultVersion', DefaultVersion)
def get_MinVersion(self): # Long
return self.get_query_params().get('MinVersion')
def set_MinVersion(self, MinVersion): # Long
self.add_query_param('MinVersion', MinVersion)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_LaunchTemplateId(self): # String
return self.get_query_params().get('LaunchTemplateId')
def set_LaunchTemplateId(self, LaunchTemplateId): # String
self.add_query_param('LaunchTemplateId', LaunchTemplateId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LaunchTemplateVersions(self): # RepeatList
return self.get_query_params().get('LaunchTemplateVersion')
def METHOD_NAME(self, LaunchTemplateVersion): # RepeatList
for depth1 in range(len(LaunchTemplateVersion)):
self.add_query_param('LaunchTemplateVersion.' + str(depth1 + 1), LaunchTemplateVersion[depth1])
def get_DetailFlag(self): # Boolean
return self.get_query_params().get('DetailFlag')
def set_DetailFlag(self, DetailFlag): # Boolean
self.add_query_param('DetailFlag', DetailFlag) | null |
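# Usage sketch (illustrative, not part of the generated SDK file); the region
# and credentials below are placeholders:
#
#   from aliyunsdkcore.client import AcsClient
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = DescribeLaunchTemplateVersionsRequest()
#   request.set_LaunchTemplateId('lt-xxxxxxxx')
#   request.set_PageSize(10)
#   response = client.do_action_with_exception(request)  # raw JSON bytes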
1,166 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class CreateVirtualBorderRouterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'CreateVirtualBorderRouter','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_CircuitCode(self): # String
return self.get_query_params().get('CircuitCode')
def set_CircuitCode(self, CircuitCode): # String
self.add_query_param('CircuitCode', CircuitCode)
def get_VlanId(self): # Integer
return self.get_query_params().get('VlanId')
def set_VlanId(self, VlanId): # Integer
self.add_query_param('VlanId', VlanId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_EnableIpv6(self): # Boolean
return self.get_query_params().get('EnableIpv6')
def set_EnableIpv6(self, EnableIpv6): # Boolean
self.add_query_param('EnableIpv6', EnableIpv6)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_PeerGatewayIp(self): # String
return self.get_query_params().get('PeerGatewayIp')
def set_PeerGatewayIp(self, PeerGatewayIp): # String
self.add_query_param('PeerGatewayIp', PeerGatewayIp)
def get_PeerIpv6GatewayIp(self): # String
return self.get_query_params().get('PeerIpv6GatewayIp')
def set_PeerIpv6GatewayIp(self, PeerIpv6GatewayIp): # String
self.add_query_param('PeerIpv6GatewayIp', PeerIpv6GatewayIp)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_PeeringSubnetMask(self): # String
return self.get_query_params().get('PeeringSubnetMask')
def set_PeeringSubnetMask(self, PeeringSubnetMask): # String
self.add_query_param('PeeringSubnetMask', PeeringSubnetMask)
def get_LocalGatewayIp(self): # String
return self.get_query_params().get('LocalGatewayIp')
def set_LocalGatewayIp(self, LocalGatewayIp): # String
self.add_query_param('LocalGatewayIp', LocalGatewayIp)
def get_PeeringIpv6SubnetMask(self): # String
return self.get_query_params().get('PeeringIpv6SubnetMask')
def set_PeeringIpv6SubnetMask(self, PeeringIpv6SubnetMask): # String
self.add_query_param('PeeringIpv6SubnetMask', PeeringIpv6SubnetMask)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_Bandwidth(self): # Long
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self, Bandwidth): # Long
self.add_query_param('Bandwidth', Bandwidth)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_Tagss(self): # RepeatList
return self.get_query_params().get('Tags')
def set_Tagss(self, Tags): # RepeatList
for depth1 in range(len(Tags)):
if Tags[depth1].get('Value') is not None:
self.add_query_param('Tags.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
if Tags[depth1].get('Key') is not None:
self.add_query_param('Tags.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))
def get_PhysicalConnectionId(self): # String
return self.get_query_params().get('PhysicalConnectionId')
def set_PhysicalConnectionId(self, PhysicalConnectionId): # String
self.add_query_param('PhysicalConnectionId', PhysicalConnectionId)
def get_LocalIpv6GatewayIp(self): # String
return self.get_query_params().get('LocalIpv6GatewayIp')
def METHOD_NAME(self, LocalIpv6GatewayIp): # String
self.add_query_param('LocalIpv6GatewayIp', LocalIpv6GatewayIp)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_VbrOwnerId(self): # Long
return self.get_query_params().get('VbrOwnerId')
def set_VbrOwnerId(self, VbrOwnerId): # Long
self.add_query_param('VbrOwnerId', VbrOwnerId) | null |
1,167 | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
"""
from __future__ import annotations
from petstore_api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
SmallCamel: typing_extensions.TypeAlias = schemas.StrSchema
METHOD_NAME: typing_extensions.TypeAlias = schemas.StrSchema
SmallSnake: typing_extensions.TypeAlias = schemas.StrSchema
CapitalSnake: typing_extensions.TypeAlias = schemas.StrSchema
SCAETHFlowPoints: typing_extensions.TypeAlias = schemas.StrSchema
ATTNAME: typing_extensions.TypeAlias = schemas.StrSchema
Properties = typing.TypedDict(
'Properties',
{
"smallCamel": typing.Type[SmallCamel],
"CapitalCamel": typing.Type[METHOD_NAME],
"small_Snake": typing.Type[SmallSnake],
"Capital_Snake": typing.Type[CapitalSnake],
"SCA_ETH_Flow_Points": typing.Type[SCAETHFlowPoints],
"ATT_NAME": typing.Type[ATTNAME],
}
)
class CapitalizationDict(schemas.immutabledict[str, str]):
__required_keys__: typing.FrozenSet[str] = frozenset({
})
__optional_keys__: typing.FrozenSet[str] = frozenset({
"smallCamel",
"CapitalCamel",
"small_Snake",
"Capital_Snake",
"SCA_ETH_Flow_Points",
"ATT_NAME",
})
def __new__(
cls,
*,
smallCamel: typing.Union[
str,
schemas.Unset
] = schemas.unset,
METHOD_NAME: typing.Union[
str,
schemas.Unset
] = schemas.unset,
small_Snake: typing.Union[
str,
schemas.Unset
] = schemas.unset,
Capital_Snake: typing.Union[
str,
schemas.Unset
] = schemas.unset,
SCA_ETH_Flow_Points: typing.Union[
str,
schemas.Unset
] = schemas.unset,
ATT_NAME: typing.Union[
str,
schemas.Unset
] = schemas.unset,
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
**kwargs: schemas.INPUT_TYPES_ALL,
):
arg_: typing.Dict[str, typing.Any] = {}
for key, val in (
("smallCamel", smallCamel),
("CapitalCamel", METHOD_NAME),
("small_Snake", small_Snake),
("Capital_Snake", Capital_Snake),
("SCA_ETH_Flow_Points", SCA_ETH_Flow_Points),
("ATT_NAME", ATT_NAME),
):
if isinstance(val, schemas.Unset):
continue
arg_[key] = val
arg_.update(kwargs)
used_arg_ = typing.cast(CapitalizationDictInput, arg_)
return Capitalization.validate(used_arg_, configuration=configuration_)
@staticmethod
def from_dict_(
arg: typing.Union[
CapitalizationDictInput,
CapitalizationDict
],
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
) -> CapitalizationDict:
return Capitalization.validate(arg, configuration=configuration)
@property
def smallCamel(self) -> typing.Union[str, schemas.Unset]:
val = self.get("smallCamel", schemas.unset)
if isinstance(val, schemas.Unset):
return val
return typing.cast(
str,
val
)
@property
def METHOD_NAME(self) -> typing.Union[str, schemas.Unset]:
val = self.get("CapitalCamel", schemas.unset)
if isinstance(val, schemas.Unset):
return val
return typing.cast(
str,
val
)
@property
def small_Snake(self) -> typing.Union[str, schemas.Unset]:
val = self.get("small_Snake", schemas.unset)
if isinstance(val, schemas.Unset):
return val
return typing.cast(
str,
val
)
@property
def Capital_Snake(self) -> typing.Union[str, schemas.Unset]:
val = self.get("Capital_Snake", schemas.unset)
if isinstance(val, schemas.Unset):
return val
return typing.cast(
str,
val
)
@property
def SCA_ETH_Flow_Points(self) -> typing.Union[str, schemas.Unset]:
val = self.get("SCA_ETH_Flow_Points", schemas.unset)
if isinstance(val, schemas.Unset):
return val
return typing.cast(
str,
val
)
@property
def ATT_NAME(self) -> typing.Union[str, schemas.Unset]:
val = self.get("ATT_NAME", schemas.unset)
if isinstance(val, schemas.Unset):
return val
return typing.cast(
str,
val
)
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
return self.get(name, schemas.unset)
CapitalizationDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
@dataclasses.dataclass(frozen=True)
class Capitalization(
schemas.Schema[CapitalizationDict, tuple]
):
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator.
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
Do not edit the class manually.
"""
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
type_to_output_cls: typing.Mapping[
typing.Type,
typing.Type
] = dataclasses.field(
default_factory=lambda: {
schemas.immutabledict: CapitalizationDict
}
)
@classmethod
def validate(
cls,
arg: typing.Union[
CapitalizationDictInput,
CapitalizationDict,
],
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
) -> CapitalizationDict:
return super().validate_base(
arg,
configuration=configuration,
)
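# Usage sketch (illustrative, not generated code): validate() returns the
# immutable typed mapping defined above.
#
#   payload = Capitalization.validate({"smallCamel": "hi", "ATT_NAME": "x"})
#   assert isinstance(payload, CapitalizationDict)
#   payload.smallCamel   # -> "hi"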
| null |
1,168 | from typing import Union
from .registry import DATASET_COLLECTION_TYPES_REGISTRY
class CollectionTypeDescriptionFactory:
def __init__(self, type_registry=DATASET_COLLECTION_TYPES_REGISTRY):
# taking in type_registry though not using it, because we will someday
# I think.
self.type_registry = type_registry
def METHOD_NAME(self, collection_type):
assert collection_type is not None
return CollectionTypeDescription(collection_type, self)
class CollectionTypeDescription:
"""Abstraction over dataset collection type that ties together string
reprentation in database/model with type registry.
>>> factory = CollectionTypeDescriptionFactory(None)
>>> nested_type_description = factory.for_collection_type("list:paired")
>>> paired_type_description = factory.for_collection_type("paired")
>>> nested_type_description.has_subcollections_of_type("list")
False
>>> nested_type_description.has_subcollections_of_type("list:paired")
False
>>> nested_type_description.has_subcollections_of_type("paired")
True
>>> nested_type_description.has_subcollections_of_type(paired_type_description)
True
>>> nested_type_description.has_subcollections()
True
>>> paired_type_description.has_subcollections()
False
>>> paired_type_description.rank_collection_type()
'paired'
>>> nested_type_description.rank_collection_type()
'list'
>>> nested_type_description.effective_collection_type(paired_type_description)
'list'
>>> nested_type_description.effective_collection_type_description(paired_type_description).collection_type
'list'
>>> nested_type_description.child_collection_type()
'paired'
"""
collection_type: str
def __init__(self, collection_type: Union[str, "CollectionTypeDescription"], collection_type_description_factory):
if isinstance(collection_type, CollectionTypeDescription):
self.collection_type = collection_type.collection_type
else:
self.collection_type = collection_type
self.collection_type_description_factory = collection_type_description_factory
self.__has_subcollections = self.collection_type.find(":") > 0
def child_collection_type(self):
rank_collection_type = self.rank_collection_type()
return self.collection_type[len(rank_collection_type) + 1 :]
def child_collection_type_description(self):
child_collection_type = self.child_collection_type()
return self.collection_type_description_factory.METHOD_NAME(child_collection_type)
def effective_collection_type_description(self, subcollection_type):
effective_collection_type = self.effective_collection_type(subcollection_type)
return self.collection_type_description_factory.METHOD_NAME(effective_collection_type)
def effective_collection_type(self, subcollection_type):
if hasattr(subcollection_type, "collection_type"):
subcollection_type = subcollection_type.collection_type
if not self.has_subcollections_of_type(subcollection_type):
raise ValueError(f"Cannot compute effective subcollection type of {subcollection_type} over {self}")
return self.collection_type[: -(len(subcollection_type) + 1)]
def has_subcollections_of_type(self, other_collection_type):
"""Take in another type (either flat string or another
CollectionTypeDescription) and determine if this collection contains
subcollections matching that type.
The way this is used in map/reduce it seems to make the most sense
for this to return True if these subtypes are proper (i.e. a type
is not considered to have subcollections of its own type).
"""
if hasattr(other_collection_type, "collection_type"):
other_collection_type = other_collection_type.collection_type
collection_type = self.collection_type
return collection_type.endswith(other_collection_type) and collection_type != other_collection_type
def is_subcollection_of_type(self, other_collection_type):
if not hasattr(other_collection_type, "collection_type"):
other_collection_type = self.collection_type_description_factory.METHOD_NAME(other_collection_type)
return other_collection_type.has_subcollections_of_type(self)
def can_match_type(self, other_collection_type):
if hasattr(other_collection_type, "collection_type"):
other_collection_type = other_collection_type.collection_type
collection_type = self.collection_type
return other_collection_type == collection_type
def subcollection_type_description(self):
if not self.__has_subcollections:
raise ValueError(f"Cannot generate subcollection type description for flat type {self.collection_type}")
subcollection_type = self.collection_type.split(":", 1)[1]
return self.collection_type_description_factory.METHOD_NAME(subcollection_type)
def has_subcollections(self):
return self.__has_subcollections
def rank_collection_type(self):
"""Return the top-level collection type corresponding to this
collection type. For instance the "rank" type of a list of paired
data ("list:paired") is "list".
"""
return self.collection_type.split(":")[0]
def rank_type_plugin(self):
return self.collection_type_description_factory.type_registry.get(self.rank_collection_type())
@property
def dimension(self):
return len(self.collection_type.split(":")) + 1
def multiply(self, other_collection_type):
collection_type = map_over_collection_type(self, other_collection_type)
return self.collection_type_description_factory.METHOD_NAME(collection_type)
def __str__(self):
return f"CollectionTypeDescription[{self.collection_type}]"
def map_over_collection_type(mapped_over_collection_type, target_collection_type):
if hasattr(mapped_over_collection_type, "collection_type"):
mapped_over_collection_type = mapped_over_collection_type.collection_type
if not target_collection_type:
return mapped_over_collection_type
else:
if hasattr(target_collection_type, "collection_type"):
target_collection_type = target_collection_type.collection_type
return f"{mapped_over_collection_type}:{target_collection_type}"
COLLECTION_TYPE_DESCRIPTION_FACTORY = CollectionTypeDescriptionFactory() | null |
1,169 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeInstancesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ens', '2017-11-10', 'DescribeInstances','ens')
self.set_method('POST')
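    # Minimal usage sketch (client setup and credential values below are
    # illustrative assumptions, not defined in this file):
    #
    #   from aliyunsdkcore.client import AcsClient
    #   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
    #   request = DescribeInstancesRequest()
    #   request.set_PageSize('10')
    #   response = client.do_action_with_exception(request)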
def get_OrderByParams(self): # String
return self.get_query_params().get('OrderByParams')
def set_OrderByParams(self, OrderByParams): # String
self.add_query_param('OrderByParams', OrderByParams)
def get_EnsRegionId(self): # String
return self.get_query_params().get('EnsRegionId')
def METHOD_NAME(self, EnsRegionId): # String
self.add_query_param('EnsRegionId', EnsRegionId)
def get_InstanceResourceType(self): # String
return self.get_query_params().get('InstanceResourceType')
def set_InstanceResourceType(self, InstanceResourceType): # String
self.add_query_param('InstanceResourceType', InstanceResourceType)
def get_EnsServiceId(self): # String
return self.get_query_params().get('EnsServiceId')
def set_EnsServiceId(self, EnsServiceId): # String
self.add_query_param('EnsServiceId', EnsServiceId)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_InstanceName(self): # String
return self.get_query_params().get('InstanceName')
def set_InstanceName(self, InstanceName): # String
self.add_query_param('InstanceName', InstanceName)
def get_InstanceIds(self): # String
return self.get_query_params().get('InstanceIds')
def set_InstanceIds(self, InstanceIds): # String
self.add_query_param('InstanceIds', InstanceIds)
def get_NetworkId(self): # String
return self.get_query_params().get('NetworkId')
def set_NetworkId(self, NetworkId): # String
self.add_query_param('NetworkId', NetworkId)
def get_Status(self): # String
return self.get_query_params().get('Status')
def set_Status(self, Status): # String
self.add_query_param('Status', Status)
def get_IntranetIp(self): # String
return self.get_query_params().get('IntranetIp')
def set_IntranetIp(self, IntranetIp): # String
self.add_query_param('IntranetIp', IntranetIp)
def get_ImageId(self): # String
return self.get_query_params().get('ImageId')
def set_ImageId(self, ImageId): # String
self.add_query_param('ImageId', ImageId)
def get_SecurityGroupId(self): # String
return self.get_query_params().get('SecurityGroupId')
def set_SecurityGroupId(self, SecurityGroupId): # String
self.add_query_param('SecurityGroupId', SecurityGroupId)
def get_SearchKey(self): # String
return self.get_query_params().get('SearchKey')
def set_SearchKey(self, SearchKey): # String
self.add_query_param('SearchKey', SearchKey)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_PageSize(self): # String
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # String
self.add_query_param('PageSize', PageSize)
def get_EnsRegionIds(self): # String
return self.get_query_params().get('EnsRegionIds')
def set_EnsRegionIds(self, EnsRegionIds): # String
self.add_query_param('EnsRegionIds', EnsRegionIds) | null |
1,170 | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: [email protected]
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictStr, constr, validator
from lightly.openapi_generated.swagger_client.models.datasource_config_base import DatasourceConfigBase
from lightly.openapi_generated.swagger_client.models.s3_region import S3Region
class DatasourceConfigS3(DatasourceConfigBase):
"""
DatasourceConfigS3
"""
full_path: StrictStr = Field(..., alias="fullPath", description="path includes the bucket name and the path within the bucket where you have stored your information")
s3_region: S3Region = Field(..., alias="s3Region")
s3_access_key_id: constr(strict=True, min_length=1) = Field(..., alias="s3AccessKeyId", description="The accessKeyId of the credential you are providing Lightly to use")
s3_secret_access_key: constr(strict=True, min_length=1) = Field(..., alias="s3SecretAccessKey", description="The secretAccessKey of the credential you are providing Lightly to use")
s3_server_side_encryption_kms_key: Optional[constr(strict=True, min_length=1)] = Field(None, alias="s3ServerSideEncryptionKMSKey", description="If set, Lightly Worker will automatically set the headers to use server side encryption https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingKMSEncryption.html with this value as the appropriate KMS key arn. This will encrypt the files created by Lightly (crops, frames, thumbnails) in the S3 bucket. ")
__properties = ["id", "purpose", "type", "thumbSuffix", "fullPath", "s3Region", "s3AccessKeyId", "s3SecretAccessKey", "s3ServerSideEncryptionKMSKey"]
@validator('s3_server_side_encryption_kms_key')
def s3_server_side_encryption_kms_key_validate_regular_expression(cls, value):
"""Validates the regular expression"""
if value is None:
return value
if not re.match(r"^arn:aws:kms:[a-zA-Z0-9-]*:[0-9]{12}:key.+$", value):
raise ValueError(r"must validate the regular expression /^arn:aws:kms:[a-zA-Z0-9-]*:[0-9]{12}:key.+$/")
return value
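    # An example value accepted by the pattern above (illustrative only):
    #   arn:aws:kms:us-east-1:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab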
class Config:
"""Pydantic configuration"""
allow_population_by_field_name = True
validate_assignment = True
use_enum_values = True
extra = Extra.forbid
def to_str(self, by_alias: bool = False) -> str:
"""Returns the string representation of the model"""
return pprint.pformat(self.dict(by_alias=by_alias))
def to_json(self, by_alias: bool = False) -> str:
"""Returns the JSON representation of the model"""
return json.dumps(self.to_dict(by_alias=by_alias))
@classmethod
def from_json(cls, json_str: str) -> DatasourceConfigS3:
"""Create an instance of DatasourceConfigS3 from a JSON string"""
return cls.METHOD_NAME(json.loads(json_str))
def to_dict(self, by_alias: bool = False):
"""Returns the dictionary representation of the model"""
_dict = self.dict(by_alias=by_alias,
exclude={
},
exclude_none=True)
return _dict
@classmethod
def METHOD_NAME(cls, obj: dict) -> DatasourceConfigS3:
"""Create an instance of DatasourceConfigS3 from a dict"""
if obj is None:
return None
if not isinstance(obj, dict):
return DatasourceConfigS3.parse_obj(obj)
# raise errors for additional fields in the input
for _key in obj.keys():
if _key not in cls.__properties:
raise ValueError("Error due to additional fields (not defined in DatasourceConfigS3) in the input: " + str(obj))
_obj = DatasourceConfigS3.parse_obj({
"id": obj.get("id"),
"purpose": obj.get("purpose"),
"type": obj.get("type"),
"thumb_suffix": obj.get("thumbSuffix"),
"full_path": obj.get("fullPath"),
"s3_region": obj.get("s3Region"),
"s3_access_key_id": obj.get("s3AccessKeyId"),
"s3_secret_access_key": obj.get("s3SecretAccessKey"),
"s3_server_side_encryption_kms_key": obj.get("s3ServerSideEncryptionKMSKey")
})
return _obj
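# Minimal round-trip sketch (all field values below are illustrative
# assumptions; the required base fields come from DatasourceConfigBase):
#
#   raw = json.dumps({
#       "id": "646f34608a5f45c9b2c1e5d2", "purpose": "INPUT_OUTPUT",
#       "type": "S3", "fullPath": "my-bucket/datasets/",
#       "s3Region": "eu-central-1", "s3AccessKeyId": "AKIAEXAMPLE",
#       "s3SecretAccessKey": "secret"})
#   config = DatasourceConfigS3.from_json(raw)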
| null |
1,171 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CreateLoadBalancerHTTPSListenerRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ens', '2017-11-10', 'CreateLoadBalancerHTTPSListener','ens')
self.set_method('POST')
def get_ListenerForward(self): # String
return self.get_query_params().get('ListenerForward')
def set_ListenerForward(self, ListenerForward): # String
self.add_query_param('ListenerForward', ListenerForward)
def get_HealthCheckTimeout(self): # Integer
return self.get_query_params().get('HealthCheckTimeout')
def set_HealthCheckTimeout(self, HealthCheckTimeout): # Integer
self.add_query_param('HealthCheckTimeout', HealthCheckTimeout)
def get_HealthCheckURI(self): # String
return self.get_query_params().get('HealthCheckURI')
def set_HealthCheckURI(self, HealthCheckURI): # String
self.add_query_param('HealthCheckURI', HealthCheckURI)
def get_HealthCheck(self): # String
return self.get_query_params().get('HealthCheck')
def set_HealthCheck(self, HealthCheck): # String
self.add_query_param('HealthCheck', HealthCheck)
def get_Cookie(self): # String
return self.get_query_params().get('Cookie')
def set_Cookie(self, Cookie): # String
self.add_query_param('Cookie', Cookie)
def get_HealthCheckMethod(self): # String
return self.get_query_params().get('HealthCheckMethod')
def set_HealthCheckMethod(self, HealthCheckMethod): # String
self.add_query_param('HealthCheckMethod', HealthCheckMethod)
def get_HealthCheckDomain(self): # String
return self.get_query_params().get('HealthCheckDomain')
def set_HealthCheckDomain(self, HealthCheckDomain): # String
self.add_query_param('HealthCheckDomain', HealthCheckDomain)
def get_RequestTimeout(self): # Integer
return self.get_query_params().get('RequestTimeout')
def set_RequestTimeout(self, RequestTimeout): # Integer
self.add_query_param('RequestTimeout', RequestTimeout)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_HealthCheckInterval(self): # Integer
return self.get_query_params().get('HealthCheckInterval')
def set_HealthCheckInterval(self, HealthCheckInterval): # Integer
self.add_query_param('HealthCheckInterval', HealthCheckInterval)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_UnhealthyThreshold(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
def METHOD_NAME(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def get_ForwardPort(self): # Integer
return self.get_query_params().get('ForwardPort')
def set_ForwardPort(self, ForwardPort): # Integer
self.add_query_param('ForwardPort', ForwardPort)
def get_CookieTimeout(self): # Integer
return self.get_query_params().get('CookieTimeout')
def set_CookieTimeout(self, CookieTimeout): # Integer
self.add_query_param('CookieTimeout', CookieTimeout)
def get_StickySessionType(self): # String
return self.get_query_params().get('StickySessionType')
def set_StickySessionType(self, StickySessionType): # String
self.add_query_param('StickySessionType', StickySessionType)
def get_ListenerPort(self): # Integer
return self.get_query_params().get('ListenerPort')
def set_ListenerPort(self, ListenerPort): # Integer
self.add_query_param('ListenerPort', ListenerPort)
def get_ServerCertificateId(self): # String
return self.get_query_params().get('ServerCertificateId')
def set_ServerCertificateId(self, ServerCertificateId): # String
self.add_query_param('ServerCertificateId', ServerCertificateId)
def get_IdleTimeout(self): # Integer
return self.get_query_params().get('IdleTimeout')
def set_IdleTimeout(self, IdleTimeout): # Integer
self.add_query_param('IdleTimeout', IdleTimeout)
def get_HealthCheckConnectPort(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort)
def get_HealthCheckHttpCode(self): # String
return self.get_query_params().get('HealthCheckHttpCode')
def set_HealthCheckHttpCode(self, HealthCheckHttpCode): # String
self.add_query_param('HealthCheckHttpCode', HealthCheckHttpCode) | null |
1,172 | import unittest
from importlib import import_module
import os
import warnings
import AnyQt
from Orange.data.table import Table
from Orange.classification import LogisticRegressionLearner
from Orange.classification.tree import TreeLearner
from Orange.evaluation import CrossValidation
from Orange.distance import Euclidean
from Orange.widgets.report.owreport import OWReport
from Orange.widgets.widget import OWWidget
from Orange.widgets.tests.base import WidgetTest
from Orange.widgets.visualize.owtreeviewer import OWTreeGraph
from Orange.widgets.evaluate.owcalibrationplot import OWCalibrationPlot
from Orange.widgets.evaluate.owliftcurve import OWLiftCurve
from Orange.widgets.evaluate.owrocanalysis import OWROCAnalysis
from Orange.widgets.evaluate.owtestandscore import OWTestAndScore
from Orange.widgets.unsupervised.owcorrespondence import OWCorrespondenceAnalysis
from Orange.widgets.unsupervised.owdistancemap import OWDistanceMap
from Orange.widgets.unsupervised.owdistances import OWDistances
from Orange.widgets.unsupervised.owhierarchicalclustering import OWHierarchicalClustering
from Orange.widgets.unsupervised.owkmeans import OWKMeans
from Orange.widgets.unsupervised.owmds import OWMDS
from Orange.widgets.unsupervised.owpca import OWPCA
def METHOD_NAME(top_module_name):
    """Walk top_module_name recursively, import every 'ow*.py' module, and
    return the OWWidget subclasses that define both a name and send_report."""
top_module = import_module(top_module_name)
widgets = []
for root, _, files in os.walk(top_module.__path__[0]):
root = root[len(top_module.__path__[0]):].lstrip(os.path.sep)
for file in files:
if file.lower().startswith('ow') and file.lower().endswith('.py'):
module_name = "{}.{}".format(
top_module_name,
os.path.join(root, file).replace(os.path.sep, '.')[:-len('.py')])
try:
module = import_module(module_name,
top_module_name[:top_module_name.index('.')])
except (ImportError, RuntimeError):
warnings.warn('Failed to import module: ' + module_name)
continue
for name, value in module.__dict__.items():
if (name.upper().startswith('OW') and
isinstance(value, type) and
issubclass(value, OWWidget) and
getattr(value, 'name', None) and
getattr(value, 'send_report', None)):
widgets.append(value)
return list(set(widgets))
DATA_WIDGETS = METHOD_NAME('Orange.widgets.data')
VISUALIZATION_WIDGETS = METHOD_NAME('Orange.widgets.visualize')
MODEL_WIDGETS = METHOD_NAME('Orange.widgets.model')
class TestReportWidgets(WidgetTest):
model_widgets = MODEL_WIDGETS
data_widgets = DATA_WIDGETS
eval_widgets = [OWCalibrationPlot, OWLiftCurve, OWROCAnalysis]
unsu_widgets = [OWCorrespondenceAnalysis, OWDistances, OWKMeans,
OWMDS, OWPCA]
dist_widgets = [OWDistanceMap, OWHierarchicalClustering]
visu_widgets = VISUALIZATION_WIDGETS
spec_widgets = [OWTestAndScore, OWTreeGraph]
def _create_report(self, widgets, rep, data):
for widget in widgets:
w = self.create_widget(widget)
if w.inputs and isinstance(data, w.inputs[0].type):
handler = getattr(w, w.inputs[0].handler)
handler(data)
w.create_report_html()
rep.make_report(w)
# rep.show()
def test_report_widgets_model(self):
rep = OWReport.get_instance()
data = Table("titanic")
widgets = self.model_widgets
w = self.create_widget(OWTreeGraph)
clf = TreeLearner(max_depth=3)(data)
clf.instances = data
w.ctree(clf)
w.create_report_html()
rep.make_report(w)
self._create_report(widgets, rep, data)
def test_report_widgets_data(self):
rep = OWReport.get_instance()
data = Table("zoo")
widgets = self.data_widgets
self._create_report(widgets, rep, data)
def test_report_widgets_evaluate(self):
rep = OWReport.get_instance()
data = Table("zoo")
widgets = self.eval_widgets
cv = CrossValidation(k=3, store_data=True)
results = cv(data, [LogisticRegressionLearner()])
results.learner_names = ["LR l2"]
w = self.create_widget(OWTestAndScore)
w.insert_learner(0, LogisticRegressionLearner())
w.set_train_data(data)
w.set_test_data(data)
w.create_report_html()
rep.make_report(w)
self._create_report(widgets, rep, results)
def test_report_widgets_unsupervised(self):
rep = OWReport.get_instance()
data = Table("zoo")
widgets = self.unsu_widgets
self._create_report(widgets, rep, data)
def test_report_widgets_unsupervised_dist(self):
rep = OWReport.get_instance()
data = Table("zoo")
dist = Euclidean(data)
widgets = self.dist_widgets
self._create_report(widgets, rep, dist)
def test_report_widgets_visualize(self):
rep = OWReport.get_instance()
data = Table("zoo")
widgets = self.visu_widgets
self._create_report(widgets, rep, data)
@unittest.skipIf(AnyQt.USED_API == "pyqt5", "Segfaults on PyQt5")
def test_report_widgets_all(self):
rep = OWReport.get_instance()
widgets = self.model_widgets + self.data_widgets + self.eval_widgets + \
self.unsu_widgets + self.dist_widgets + self.visu_widgets + \
self.spec_widgets
self._create_report(widgets, rep, None)
if __name__ == "__main__":
unittest.main() | null |
1,173 | import pytest
from django.utils import timezone
from unittest import mock
from osf.management.commands.approve_pending_schema_responses import approve_pending_schema_responses
from osf.models import SchemaResponse
from osf.utils.workflows import ApprovalStates
from osf_tests.factories import RegistrationFactory
from website.settings import REGISTRATION_UPDATE_APPROVAL_TIME
EXCLUDED_STATES = [state for state in ApprovalStates if state is not ApprovalStates.UNAPPROVED]
AUTO_APPROVE_TIMESTAMP = timezone.now() - REGISTRATION_UPDATE_APPROVAL_TIME
@pytest.mark.django_db
class TestApprovePendingSchemaResponses:
@pytest.fixture
def control_response(self):
reg = RegistrationFactory()
initial_response = reg.schema_responses.last()
initial_response.state = ApprovalStates.APPROVED
initial_response.save()
revision = SchemaResponse.create_from_previous_response(
previous_response=initial_response, initiator=reg.creator
)
revision.state = ApprovalStates.UNAPPROVED
revision.submitted_timestamp = AUTO_APPROVE_TIMESTAMP
revision.save()
return revision
@pytest.fixture
def test_response(self):
reg = RegistrationFactory()
initial_response = reg.schema_responses.last()
initial_response.state = ApprovalStates.APPROVED
initial_response.save()
return SchemaResponse.create_from_previous_response(
previous_response=initial_response, initiator=reg.creator
)
@pytest.mark.parametrize(
'is_moderated, expected_state',
[(False, ApprovalStates.APPROVED), (True, ApprovalStates.PENDING_MODERATION)]
)
def test_auto_approval(self, control_response, is_moderated, expected_state):
with mock.patch(
'osf.models.schema_response.SchemaResponse.is_moderated',
            new_callable=mock.PropertyMock
) as mock_is_moderated:
mock_is_moderated.return_value = is_moderated
count = approve_pending_schema_responses()
assert count == 1
control_response.refresh_from_db()
assert control_response.state is expected_state
def METHOD_NAME(
self, control_response, test_response):
test_response.state = ApprovalStates.UNAPPROVED
test_response.submitted_timestamp = AUTO_APPROVE_TIMESTAMP
test_response.save()
count = approve_pending_schema_responses()
assert count == 2
control_response.refresh_from_db()
test_response.refresh_from_db()
assert control_response.state is ApprovalStates.APPROVED
assert test_response.state is ApprovalStates.APPROVED
@pytest.mark.parametrize('revision_state', EXCLUDED_STATES)
def test_auto_approval_only_approves_unapproved_schema_responses(
self, control_response, test_response, revision_state):
test_response.state = revision_state
test_response.submitted_timestamp = AUTO_APPROVE_TIMESTAMP
test_response.save()
count = approve_pending_schema_responses()
assert count == 1
control_response.refresh_from_db()
test_response.refresh_from_db()
assert control_response.state is ApprovalStates.APPROVED
assert test_response.state is revision_state
def test_auto_approval_only_approves_schema_responses_older_than_threshold(
self, control_response, test_response):
test_response.state = ApprovalStates.UNAPPROVED
test_response.submitted_timestamp = timezone.now()
test_response.save()
count = approve_pending_schema_responses()
assert count == 1
control_response.refresh_from_db()
test_response.refresh_from_db()
assert control_response.state is ApprovalStates.APPROVED
assert test_response.state is ApprovalStates.UNAPPROVED
def test_auto_approval_does_not_pick_up_initial_responses(
self, control_response, test_response):
test_response = test_response.previous_response
test_response.state = ApprovalStates.UNAPPROVED
test_response.submitted_timestamp = timezone.now()
test_response.save()
count = approve_pending_schema_responses()
assert count == 1
control_response.refresh_from_db()
test_response.refresh_from_db()
assert control_response.state is ApprovalStates.APPROVED
assert test_response.state is ApprovalStates.UNAPPROVED
def test_dry_run(self, control_response):
with pytest.raises(RuntimeError):
approve_pending_schema_responses(dry_run=True)
control_response.refresh_from_db()
assert control_response.state is ApprovalStates.UNAPPROVED | null |
1,174 | #
# Copyright (c) 2019-2021 Red Hat, Inc.
#
# This file is part of nmstate
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
from contextlib import contextmanager
import time
import pytest
import libnmstate
from libnmstate.error import NmstateValueError
from libnmstate.error import NmstateVerificationError
from libnmstate.schema import Interface
from libnmstate.schema import InterfaceIPv4
from libnmstate.schema import InterfaceIPv6
from libnmstate.schema import InterfaceState
from .testlib import assertlib
from .testlib import cmdlib
from .testlib import statelib
from .testlib.env import nm_major_minor_version
from .testlib.genconf import gen_conf_apply
DUMMY_INTERFACE = "dummy_test"
@pytest.fixture(scope="function")
def ip_link_dummy():
cmdlib.exec_cmd(["ip", "link", "add", DUMMY_INTERFACE, "type", "dummy"])
try:
yield
finally:
cmdlib.exec_cmd(["ip", "link", "del", DUMMY_INTERFACE])
@contextmanager
def dummy_interface(name):
dummy_desired_state = {
Interface.KEY: [
{
Interface.NAME: name,
Interface.STATE: InterfaceState.UP,
Interface.IPV4: {InterfaceIPv4.ENABLED: False},
Interface.IPV6: {InterfaceIPv6.ENABLED: False},
}
]
}
libnmstate.apply(dummy_desired_state)
try:
yield dummy_desired_state
finally:
dummy_state = dummy_desired_state[Interface.KEY][0]
dummy_state[Interface.STATE] = InterfaceState.ABSENT
libnmstate.apply(dummy_desired_state)
def test_iface_description_removal(eth1_up):
desired_state = eth1_up
desired_state[Interface.KEY][0][Interface.DESCRIPTION] = "bar"
libnmstate.apply(desired_state)
current_state = statelib.show_only(("eth1",))
assert current_state[Interface.KEY][0][Interface.DESCRIPTION] == "bar"
desired_state[Interface.KEY][0][Interface.DESCRIPTION] = ""
libnmstate.apply(desired_state)
current_state = statelib.show_only(("eth1",))
assert Interface.DESCRIPTION not in current_state[Interface.KEY][0]
def test_iface_mac_address_lowercase(eth1_up):
desired_state = eth1_up
desired_state[Interface.KEY][0][Interface.MAC] = "d4:ee:07:25:42:5a"
libnmstate.apply(desired_state)
current_state = statelib.show_only(("eth1",))
assert (
current_state[Interface.KEY][0][Interface.MAC] == "D4:EE:07:25:42:5A"
)
def test_iface_mac_address_mixedcase(eth1_up):
desired_state = eth1_up
desired_state[Interface.KEY][0][Interface.MAC] = "d4:EE:07:25:42:5a"
libnmstate.apply(desired_state)
current_state = statelib.show_only(("eth1",))
assert (
current_state[Interface.KEY][0][Interface.MAC] == "D4:EE:07:25:42:5A"
)
def METHOD_NAME(ip_link_dummy):
with dummy_interface(DUMMY_INTERFACE) as dummy_desired_state:
assertlib.assert_state_match(dummy_desired_state)
current_state = statelib.show_only((DUMMY_INTERFACE,))
assert len(current_state[Interface.KEY]) == 0
def test_take_over_virtual_interface_and_rollback(ip_link_dummy):
with dummy_interface(DUMMY_INTERFACE) as dummy_desired_state:
assertlib.assert_state_match(dummy_desired_state)
dummy_desired_state[Interface.KEY][0]["invalid_key"] = "foo"
with pytest.raises((NmstateVerificationError, NmstateValueError)):
libnmstate.apply(dummy_desired_state)
time.sleep(5)
current_state = statelib.show_only((DUMMY_INTERFACE,))
assert len(current_state[Interface.KEY]) == 1
@pytest.mark.skipif(
nm_major_minor_version() < 1.31,
reason="Modifying accept-all-mac-addresses is not supported on NM.",
)
def test_enable_and_disable_accept_all_mac_addresses(eth1_up):
desired_state = eth1_up
desired_state[Interface.KEY][0][Interface.ACCEPT_ALL_MAC_ADDRESSES] = True
libnmstate.apply(desired_state)
current_state = statelib.show_only(("eth1",))
assert current_state[Interface.KEY][0][Interface.ACCEPT_ALL_MAC_ADDRESSES]
desired_state[Interface.KEY][0][Interface.ACCEPT_ALL_MAC_ADDRESSES] = False
libnmstate.apply(desired_state)
current_state = statelib.show_only(("eth1",))
eth1_state = current_state[Interface.KEY][0]
assert not eth1_state[Interface.ACCEPT_ALL_MAC_ADDRESSES]
def test_gen_conf_iface_description(eth1_up):
desired_state = {
Interface.KEY: [
{
Interface.NAME: "eth1",
Interface.DESCRIPTION: "bar",
}
]
}
with gen_conf_apply(desired_state):
assertlib.assert_state_match(desired_state) | null |
1,175 | # coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commonsense Explanations (CoS-E) Dataset."""
import json
import os
from etils import epath
from tensorflow_datasets.core.utils.lazy_imports_utils import tensorflow as tf
import tensorflow_datasets.public_api as tfds
_CITATION = """
@inproceedings{rajani2019explain,
title = "Explain Yourself! Leveraging Language models for Commonsense Reasoning",
author = "Rajani, Nazneen Fatema and
McCann, Bryan and
Xiong, Caiming and
Socher, Richard",
year="2019",
booktitle = "Proceedings of the 2019 Conference of the Association for Computational Linguistics (ACL2019)",
url ="https://arxiv.org/abs/1906.02361"
}
"""
_DESCRIPTION = """
Common Sense Explanations (CoS-E) allows for training language models to
automatically generate explanations that can be used during training and
inference in a novel Commonsense Auto-Generated Explanation (CAGE) framework.
"""
_COS_E_URL = "https://raw.githubusercontent.com/salesforce/cos-e/master/data/"
# CoS-E has explanations for the CQA dataset, which is joined by ID.
_CQA_URL_TRAIN = "https://s3.amazonaws.com/commensenseqa/train_rand_split.jsonl"
_CQA_URL_DEV = "https://s3.amazonaws.com/commensenseqa/dev_rand_split.jsonl"
_CQA_URL_TEST = (
"https://s3.amazonaws.com/commensenseqa/test_rand_split_no_answers.jsonl"
)
def _download_and_index_cqa(dl_manager):
"""Downloads CQA and returns it, indexed by id, for joining with Cos-E."""
downloaded_files = dl_manager.download_and_extract({
"cqa_train": _CQA_URL_TRAIN,
"cqa_dev": _CQA_URL_DEV,
"cqa_test": _CQA_URL_TEST,
})
# NB: "cqa_test" is included in the files, but not in any of the CoS-E splits.
cqa_splits = ["cqa_train", "cqa_dev"]
cqa_complete = []
for split in cqa_splits:
with tf.io.gfile.GFile(downloaded_files[split]) as f:
for _, line in enumerate(f):
d = json.loads(line)
cqa_complete.append(d)
# Index the CQA dataset by id for joining with Cos-E.
cqa_indexed = {}
for d in cqa_complete:
cqa_indexed[d["id"]] = d
return cqa_indexed
def METHOD_NAME(cqa):
"""Returns choices and the answer from a cqa example."""
choices = []
answer_key = cqa["answerKey"]
answer = None
for choice in cqa["question"]["choices"]:
choices.append(choice["text"])
if answer_key == choice["label"]:
answer = choice["text"]
return choices, answer
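# Illustrative input/output sketch for the helper above (field values are
# assumptions matching the CQA JSONL schema):
#
#   cqa = {"answerKey": "A",
#          "question": {"choices": [{"label": "A", "text": "river"},
#                                   {"label": "B", "text": "lake"}]}}
#   METHOD_NAME(cqa)  # -> (["river", "lake"], "river")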
class CosE(tfds.core.GeneratorBasedBuilder):
"""CoS-E: Common Sense Explanations corpus."""
VERSION = tfds.core.Version("0.0.1")
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=tfds.features.FeaturesDict({
"id": tfds.features.Text(),
"question": tfds.features.Text(),
"choices": tfds.features.Sequence(tfds.features.Text()),
"answer": tfds.features.Text(),
"abstractive_explanation": tfds.features.Text(),
"extractive_explanation": tfds.features.Text(),
}),
supervised_keys=None,
homepage="https://github.com/salesforce/cos-e",
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
# NB: The CQA Dataset should be read only once, and only by callers who
# want to _create_ the Cos-E dataset from scratch.
cqa_indexed = _download_and_index_cqa(dl_manager)
files = dl_manager.download_and_extract({
"dev": [
os.path.join(_COS_E_URL, "v1.11/dev/cose_dev_v1.11_processed.jsonl")
],
"train": [
os.path.join(
_COS_E_URL, "v1.11/train/cose_train_v1.11_processed.jsonl"
)
],
})
# We use the CoS-E/CQA dev set as our validation set.
return [
tfds.core.SplitGenerator(
name=tfds.Split.VALIDATION,
gen_kwargs={"files": files["dev"], "cqa_indexed": cqa_indexed},
),
tfds.core.SplitGenerator(
name=tfds.Split.TRAIN,
gen_kwargs={"files": files["train"], "cqa_indexed": cqa_indexed},
),
]
def _generate_examples(self, files, **kwargs):
"""Yields examples."""
cqa_indexed = kwargs["cqa_indexed"]
for filepath in files:
with epath.Path(filepath).open() as f:
for line in f:
cos = json.loads(line)
cqa = cqa_indexed[cos["id"]]
choices, answer = METHOD_NAME(cqa)
yield cos["id"], {
"id": cos["id"],
"question": cqa["question"]["stem"],
"choices": choices,
"answer": answer,
"abstractive_explanation": cos["explanation"]["open-ended"],
"extractive_explanation": cos["explanation"]["selected"],
} | null |
1,176 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import math
from typing import Iterator, List, Optional, Tuple
import numpy as np
import torch
import torch.nn.functional as F
from overrides import overrides
from torch import nn
from archai.common.common import get_conf
from archai.common.utils import zip_eq
from archai.supergraph.nas.arch_params import ArchParams
from archai.supergraph.nas.model_desc import OpDesc
from archai.supergraph.nas.operations import Op
# TODO: the reduction cell might have its output reduced by 2^1=2X due to
# stride 2 through the input nodes; however, FactorizedReduce does only a
# 4X reduction. Is this correct?
class DivOp(Op):
"""The output of DivOp is weighted output of all allowed primitives.
"""
PRIMITIVES = [
'max_pool_3x3',
'avg_pool_3x3',
'skip_connect', # identity
'sep_conv_3x3',
'sep_conv_5x5',
'dil_conv_3x3',
'dil_conv_5x5',
'none' # this must be at the end so top1 doesn't choose it
]
# def _indices_of_notallowed(self):
# ''' computes indices of notallowed ops in PRIMITIVES '''
# self._not_allowed_indices = []
# for op_name in self.NOTALLOWED:
# self._not_allowed_indices.append(self.PRIMITIVES.index(op_name))
# self._not_allowed_indices = sorted(self._not_allowed_indices, reverse=True)
# def _create_mapping_valid_to_orig(self):
# ''' Creates a list with indices of the valid ops to the original list '''
# self._valid_to_orig = []
# for i, prim in enumerate(self.PRIMITIVES):
# if prim in self.NOTALLOWED:
# continue
# else:
# self._valid_to_orig.append(i)
def __init__(self, op_desc:OpDesc, arch_params:Optional[ArchParams],
affine:bool):
super().__init__()
# assume last PRIMITIVE is 'none'
assert DivOp.PRIMITIVES[-1] == 'none'
conf = get_conf()
trainer = conf['nas']['search']['divnas']['archtrainer']
finalizer = conf['nas']['search']['finalizer']
if trainer == 'noalpha' and finalizer == 'default':
raise NotImplementedError('noalpha trainer is not implemented for the default finalizer')
if trainer != 'noalpha':
self._setup_arch_params(arch_params)
else:
self._alphas = None
self._ops = nn.ModuleList()
for primitive in DivOp.PRIMITIVES:
op = Op.create(
OpDesc(primitive, op_desc.params, in_len=1, trainables=None),
affine=affine, arch_params=None)
self._ops.append(op)
# various state variables for diversity
self._collect_activations = False
self._forward_counter = 0
self._batch_activs = None
#self._indices_of_notallowed()
#self._create_mapping_valid_to_orig()
@property
def collect_activations(self)->bool:
return self._collect_activations
@collect_activations.setter
def collect_activations(self, to_collect:bool)->None:
self._collect_activations = to_collect
@property
def activations(self)->Optional[List[np.array]]:
return self._batch_activs
@property
def num_primitive_ops(self)->int:
return len(self.PRIMITIVES)
@overrides
def forward(self, x):
# save activations to object
if self._collect_activations:
self._forward_counter += 1
activs = [op(x) for op in self._ops]
# delete the activation for none type
# as we don't consider it
activs = activs[:-1]
self._batch_activs = [t.cpu().detach().numpy() for t in activs]
if self._alphas:
asm = F.softmax(self._alphas[0], dim=0)
result = sum(w * op(x) for w, op in zip(asm, self._ops))
else:
result = sum(op(x) for op in self._ops)
return result
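    # Note: when arch params are present, forward computes the DARTS-style
    # mixed op, result = sum_i softmax(alpha)_i * op_i(x); with the 'noalpha'
    # trainer all primitive outputs are summed with equal (unit) weight.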
@overrides
def ops(self)->Iterator[Tuple['Op', float]]: # type: ignore
return iter(sorted(zip_eq(self._ops,
self._alphas[0] if self._alphas is not None else [math.nan for _ in range(len(self._ops))]),
key=lambda t:t[1], reverse=True))
# def get_valid_op_desc(self, index:int)->OpDesc:
# ''' index: index in the valid index list '''
# assert index <= self.num_valid_div_ops
# orig_index = self._valid_to_orig[index]
# desc, _ = self._ops[orig_index].finalize()
# return desc
@overrides
def finalize(self) -> Tuple[OpDesc, Optional[float]]:
        '''Divnas with the default finalizer option needs this override;
        otherwise the finalizer in the base class returns the whole DivOp.'''
with torch.no_grad():
# select except 'none' op
val, i = torch.topk(self._alphas[0][:-1], 1)
desc, _ = self._ops[i].finalize()
return desc, float(val.item())
@overrides
def METHOD_NAME(self) -> bool:
return False
def _setup_arch_params(self, arch_params:Optional[ArchParams])->None:
# do we have shared arch params?
if arch_params is None:
# create our own arch params
new_p = nn.Parameter( # TODO: use better init than uniform random?
1.0e-3*torch.randn(len(self.PRIMITIVES)), requires_grad=True)
self.create_arch_params([('alphas', new_p)])
else:
assert arch_params.has_kind('alphas')
self.set_arch_params(arch_params)
        # we store alphas in a list so PyTorch doesn't register them
self._alphas = list(self.arch_params().param_by_kind('alphas'))
        assert len(self._alphas) == 1
1,177 | import json
import unittest
import cfnlint
from taskcat._cfn.stack_url_helper import StackURLHelper
class TestStackURLHelper(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@staticmethod
def _load_template(template_path):
try:
cfn = cfnlint.decode.cfn_yaml.load(template_path)
except Exception:
# print("Exception parsing: '{}'".format(template_path))
exit(1)
return cfn
# Test TemplateURL to path extraction
def METHOD_NAME(self):
with open("tests/data/stackurlhelper/test.json") as test_file:
self.testers = json.load(test_file)
self.testers = self.testers["tests"]
total = len(self.testers)
matched = 0
for test in self.testers:
helper = StackURLHelper()
cfn = self._load_template(test["input"]["master_template"])
helper.mappings = cfn.get("Mappings")
helper.template_parameters = cfn.get("Parameters")
# Setup default parameters
default_parameters = {}
for parameter in helper.template_parameters:
properties = helper.template_parameters.get(parameter)
if "Default" in properties.keys():
default_parameters[parameter] = properties["Default"]
helper.SUBSTITUTION.update(default_parameters)
test["input"]["parameter_values"] = {}
# Inject Parameter Values
if "parameter_values" in test["input"]:
parameter_values = test["input"]["parameter_values"]
helper.SUBSTITUTION.update(parameter_values)
# print(test)
# print(test["output"]["url_paths"])
# print(helper.flatten_template_url(test["input"]["child_template"]))
if test["output"]["url_paths"] == helper.flatten_template_url(
test["input"]["child_template"],
):
matched = matched + 1
# print("matched {} total {}".format(matched, total))
self.assertEqual(matched, total)
def test_flatten_template_url_exceptions_split(self):
helper = StackURLHelper()
with self.assertRaises(Exception) as context:
helper.flatten_template_url("{'Fn::Split'}")
self.assertTrue("Fn::Split: not supported" in str(context.exception))
def test_flatten_template_url_exceptions_getatt(self):
helper = StackURLHelper()
with self.assertRaises(Exception) as context:
helper.flatten_template_url("{'Fn::GetAtt'}")
self.assertTrue("Fn::GetAtt: not supported" in str(context.exception))
def test_flatten_template_url_maxdepth(self):
helper = StackURLHelper()
with self.assertRaises(Exception) as context:
helper.flatten_template_url(
"{1{2{3{4{5{6{7{8{9{{{{{{{{{{{{21}}}}}}}}}}}}}}}}}}}}}"
)
self.assertTrue("Template URL contains more than" in str(context.exception))
# Test TemplateURL to path extraction
def test_find_local_child_template(self):
with open("tests/data/stackurlhelper/test.json") as test_file:
self.tests = json.load(test_file)
self.tests = self.tests["tests"]
total = 0
matched = 0
helper = StackURLHelper()
for test in self.tests:
index = 0
for url_path in test["output"]["url_paths"]:
total = total + 1
master_template = test["input"]["master_template"]
result = helper.find_local_child_template(master_template, url_path)
expected = test["output"]["local_paths"][index]
if str(result) == str(expected):
matched = matched + 1
index = index + 1
# print("matched {} total {}".format(matched, total))
self.assertEqual(matched, total)
def test_fn_findinmap_lookup(self):
l_mappings = {
"ami_lookup": {
"us-east-1": {"ami": "this_one", "ami2": "that_one"},
"us-east-2": {"ami": "is_this_one", "ami2": "is_that_one"},
"us-west-1": {"ami": "not_this_one", "ami2": "not_that_one"},
}
}
helper = StackURLHelper()
helper.mappings = l_mappings
mappings_map = "ami_lookup"
first_key = "us-west-1"
final_key = "ami2"
result = helper.find_in_map_lookup(mappings_map, first_key, final_key)
self.assertEqual(result, "not_that_one")
# TODO: Test all the individual functions
# TODO: Test fn_sub logic
# def test_fn_sub(self):
# self.assertEqual(True, False)
# TODO: Test local path Discovery
# def test_fn_if(self):
# self.assertEqual(True, False) | null |
1,178 | import os
import random
import tempfile
from string import ascii_uppercase
from unittest import TestCase
from unittest.mock import MagicMock
from minigalaxy.constants import DOWNLOAD_CHUNK_SIZE
from minigalaxy.download import Download, DownloadType
from minigalaxy.download_manager import DownloadManager
class TestDownloadManager(TestCase):
def test_download_operation(self):
session = MagicMock()
download_request = MagicMock()
session.get.return_value = download_request
chunk1 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
chunk2 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
chunk3 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
download_request.iter_content.return_value = [chunk1, chunk2, chunk3]
download_request.headers.get.return_value = len(chunk1) + len(chunk2) + len(chunk3)
download_manager = DownloadManager(session)
progress_func = MagicMock()
finish_func = MagicMock()
cancel_func = MagicMock()
temp_file = tempfile.mktemp()
download = Download("example.com", temp_file, DownloadType.GAME, finish_func, progress_func, cancel_func)
download_manager._DownloadManager__download_operation(download, 0, "wb")
expected = chunk1 + chunk2 + chunk3
with open(temp_file) as content:
actual = content.read().encode('utf-8')
self.assertEqual(expected, actual)
# Clean up temp_file
os.remove(temp_file)
self.assertFalse(os.path.isfile(temp_file))
download_request.headers.get.assert_called_once()
download_request.iter_content.assert_called_once()
self.assertEqual(3 + 2, progress_func.call_count)
self.assertEqual(0, finish_func.call_count)
self.assertEqual(0, cancel_func.call_count)
def test_download_operation_still_downloads_without_content_length(self):
session = MagicMock()
download_request = MagicMock()
session.get.return_value = download_request
chunk1 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
chunk2 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
chunk3 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
download_request.iter_content.return_value = [chunk1, chunk2, chunk3]
download_request.headers.get.side_effect = TypeError
download_manager = DownloadManager(session)
progress_func = MagicMock()
finish_func = MagicMock()
cancel_func = MagicMock()
temp_file = tempfile.mktemp()
download = Download("example.com", temp_file, DownloadType.GAME, finish_func, progress_func, cancel_func)
download_manager._DownloadManager__download_operation(download, 0, "wb")
expected = chunk1 + chunk2 + chunk3
with open(temp_file) as content:
actual = content.read().encode('utf-8')
self.assertEqual(expected, actual)
# Clean up temp_file
os.remove(temp_file)
self.assertFalse(os.path.isfile(temp_file))
download_request.headers.get.assert_called_once()
download_request.iter_content.assert_called_once()
self.assertEqual(2, progress_func.call_count)
self.assertEqual(0, finish_func.call_count)
self.assertEqual(0, cancel_func.call_count)
def test_download_operation_cancel_download(self):
session = MagicMock()
download_request = MagicMock()
session.get.return_value = download_request
chunk1 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
chunk2 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
chunk3 = bytes(random.choices(ascii_uppercase.encode('utf-8'), k=DOWNLOAD_CHUNK_SIZE))
download_request.iter_content.return_value = [chunk1, chunk2, chunk3]
download_request.headers.get.side_effect = TypeError
download_manager = DownloadManager(session)
progress_func = MagicMock()
finish_func = MagicMock()
cancel_func = MagicMock()
temp_file = tempfile.mktemp()
download = Download("example.com", temp_file, DownloadType.GAME, finish_func, progress_func, cancel_func)
download_manager.active_downloads[download] = download
download_manager.cancel_download(download)
download_manager._DownloadManager__download_operation(download, 0, "wb")
expected = chunk1
with open(temp_file) as content:
actual = content.read().encode('utf-8')
self.assertEqual(expected, actual)
# Clean up temp_file
os.remove(temp_file)
self.assertFalse(os.path.isfile(temp_file))
download_request.headers.get.assert_called_once()
download_request.iter_content.assert_called_once()
self.assertEqual(1, progress_func.call_count)
self.assertEqual(0, finish_func.call_count)
self.assertEqual(0, cancel_func.call_count)
def METHOD_NAME(self):
session = MagicMock()
download_manager = DownloadManager(session)
progress_func = MagicMock()
finish_func = MagicMock()
cancel_func = MagicMock()
temp_file = tempfile.mktemp()
download = Download("example.com", temp_file, DownloadType.GAME, finish_func, progress_func, cancel_func)
download_manager.download(download)
download_manager.cancel_download(download)
cancel_func.assert_called_once()
for queue in download_manager.queues:
for i in queue:
self.assertNotEqual(i, download)
self.assertFalse(os.path.isfile(temp_file)) | null |
1,179 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkga.endpoint import endpoint_data
class CreateBasicAcceleratorRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ga', '2019-11-20', 'CreateBasicAccelerator','gaplus')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_AutoUseCoupon(self): # String
return self.get_query_params().get('AutoUseCoupon')
def set_AutoUseCoupon(self, AutoUseCoupon): # String
self.add_query_param('AutoUseCoupon', AutoUseCoupon)
def get_AutoRenewDuration(self): # Integer
return self.get_query_params().get('AutoRenewDuration')
def set_AutoRenewDuration(self, AutoRenewDuration): # Integer
self.add_query_param('AutoRenewDuration', AutoRenewDuration)
def get_Duration(self): # Integer
return self.get_query_params().get('Duration')
def set_Duration(self, Duration): # Integer
self.add_query_param('Duration', Duration)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
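    # Illustrative sketch of how the repeated Tag parameters above flatten
    # into query params (the key/value below are assumptions):
    #
    #   request = CreateBasicAcceleratorRequest()
    #   request.set_Tags([{'Key': 'env', 'Value': 'prod'}])
    #   # -> adds query params Tag.1.Key=env and Tag.1.Value=prod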
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def METHOD_NAME(self): # String
return self.get_query_params().get('PromotionOptionNo')
def set_PromotionOptionNo(self, PromotionOptionNo): # String
self.add_query_param('PromotionOptionNo', PromotionOptionNo)
def get_BandwidthBillingType(self): # String
return self.get_query_params().get('BandwidthBillingType')
def set_BandwidthBillingType(self, BandwidthBillingType): # String
self.add_query_param('BandwidthBillingType', BandwidthBillingType)
def get_AutoRenew(self): # Boolean
return self.get_query_params().get('AutoRenew')
def set_AutoRenew(self, AutoRenew): # Boolean
self.add_query_param('AutoRenew', AutoRenew)
def get_ChargeType(self): # String
return self.get_query_params().get('ChargeType')
def set_ChargeType(self, ChargeType): # String
self.add_query_param('ChargeType', ChargeType)
def get_PricingCycle(self): # String
return self.get_query_params().get('PricingCycle')
def set_PricingCycle(self, PricingCycle): # String
self.add_query_param('PricingCycle', PricingCycle) | null |
1,180 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright 2022 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Script function
# Select all routines supported by the current development board under the current IDF version from list.json
import re
import os
import argparse
IDF_SUPPORT_CHIP = {'722043f734':['v33'],
'release/v3.3':['v33'],
'release/v4.1':['v41'],
'release/v4.2':['v42'],
'release/v4.3':['v43'],
'v4.3.4':['v43'],
'release/v4.4':['v44'],
'release/v5.0':['v50']}
ADF_PATH = os.getenv('ADF_PATH')
if not ADF_PATH:
print('Please set ADF_PATH before running this script')
raise SystemExit(-1)
BUILD_PATH = os.getenv('BUILD_PATH')
if not BUILD_PATH:
print('Please set BUILD_PATH before running this script')
raise SystemExit(-1)
APPS_JSON_PATH = ADF_PATH + '/tools/ci/apps_v4_4.json'
LIST_JSON_PATH = BUILD_PATH + '/list.json'
LIST_JSON_BK_PATH = BUILD_PATH + '/list_backup.json'
# Filter examples
# Remove examples that are not supported by this development board under the current IDF version.
def METHOD_NAME(reserve_example):
if os.path.exists(LIST_JSON_PATH):
os.rename(LIST_JSON_PATH, LIST_JSON_BK_PATH)
list_json = open(LIST_JSON_PATH, 'a')
list_backup = open(LIST_JSON_BK_PATH, 'r', encoding='utf8', errors='ignore').readlines()
for i in range(len(reserve_example)):
        single_example_str = r'\{"build_system": "cmake", "app_dir": "' + str(reserve_example[i]) + r'.*?\}'
single_example_json = re.findall(single_example_str, str(list_backup))
print(single_example_json)
list_json.write(str(single_example_json[0])+'\n')
# Parse apps_v4_4.json
# Get all the examples supported by the current development board under the idf_branch version from 'apps.josn'.
def parse_apps_json(idf_branch, board):
apps_json = open(APPS_JSON_PATH, 'r', encoding='utf8', errors='ignore').readlines()
list_json = open(LIST_JSON_PATH, 'r', encoding='utf8', errors='ignore').readlines()
list_examples = re.findall('"app_dir": "(.*?)", "work_dir"',str(list_json))
print(list_examples)
support_examples = list()
for example in list_examples:
single_example = re.findall('/examples/(.*)',str(example))
board_type = re.findall(str(single_example[0]) +'(.*?)}',str(apps_json))
idf_version = re.findall(board + ':\[(.*?)];',str(board_type))
if not idf_version:
print("need rm:" + example)
if idf_branch in str(idf_version):
support_examples.append(example)
elif '-' in str(idf_version):
idf_version_range = re.findall('v[0-9][0-9]',str(idf_version))
if idf_version_range is not None:
if idf_branch > idf_version_range[0] and idf_branch < idf_version_range[1]:
support_examples.append(example)
print(support_examples)
return support_examples
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--board", "-B",
help="Set audio hal",
default='ESP_LYRAT_V4_3')
parser.add_argument("--branch", "-b",
help="Set branch",
default='release/v4.4')
args = parser.parse_args()
examples_list = list()
examples_list = parse_apps_json(str(IDF_SUPPORT_CHIP[args.branch][0]), args.board)
METHOD_NAME(examples_list) | null |
1,181 | # Copyright (c) 2018 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from m5.SimObject import *
from m5.params import *
# QoS scheduler policy used to serve incoming transactions
class QoSPolicy(SimObject):
type = "QoSPolicy"
abstract = True
cxx_header = "mem/qos/policy.hh"
cxx_class = "gem5::memory::qos::Policy"
class QoSFixedPriorityPolicy(QoSPolicy):
type = "QoSFixedPriorityPolicy"
cxx_header = "mem/qos/policy_fixed_prio.hh"
cxx_class = "gem5::memory::qos::FixedPriorityPolicy"
cxx_exports = [
PyBindMethod("initRequestorName"),
PyBindMethod("initRequestorObj"),
]
_requestor_priorities = None
def setRequestorPriority(self, request_port, priority):
if not self._requestor_priorities:
self._requestor_priorities = []
self._requestor_priorities.append([request_port, priority])
def setMasterPriority(self, request_port, priority):
warn(
"QosFixedPriority.setMasterPriority is deprecated in favor of "
"setRequestorPriority. See src/mem/qos/QoSPolicy.py for more "
"information"
)
self.setRequestorPriority(request_port, priority)
def init(self):
if not self._requestor_priorities:
print(
"Error,"
"use setRequestorPriority to init requestors/priorities\n"
)
exit(1)
else:
for prio in self._requestor_priorities:
request_port = prio[0]
priority = prio[1]
if isinstance(request_port, str):
self.getCCObject().initRequestorName(
request_port, int(priority)
)
else:
self.getCCObject().initRequestorObj(
request_port.getCCObject(), priority
)
# default fixed priority value for non-listed Requestors
qos_fixed_prio_default_prio = Param.UInt8(
0, "Default priority for non-listed Requestors"
)
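# Illustrative usage sketch (an assumption, not from the original file): in a
# gem5 configuration script one might assign fixed priorities like
#   policy = QoSFixedPriorityPolicy()
#   policy.setRequestorPriority("system.cpu.dcache_port", 2)
#   policy.setRequestorPriority("system.cpu.icache_port", 1)
# The requestor/port names above are hypothetical.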
class QoSPropFairPolicy(QoSPolicy):
type = "QoSPropFairPolicy"
cxx_header = "mem/qos/policy_pf.hh"
cxx_class = "gem5::memory::qos::PropFairPolicy"
cxx_exports = [
PyBindMethod("initRequestorName"),
PyBindMethod("initRequestorObj"),
]
_requestor_scores = None
def METHOD_NAME(self, request_port, score):
if not self._requestor_scores:
self._requestor_scores = []
self._requestor_scores.append([request_port, score])
def init(self):
if not self._requestor_scores:
print("Error, use setInitialScore to init requestors/scores\n")
exit(1)
else:
for prio in self._requestor_scores:
request_port = prio[0]
score = prio[1]
if isinstance(request_port, str):
self.getCCObject().initRequestorName(
request_port, float(score)
)
else:
self.getCCObject().initRequestorObj(
request_port.getCCObject(), float(score)
)
weight = Param.Float(0.5, "Pf score weight") | null |
1,182 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
from abc import ABC, abstractmethod
from typing import Optional, Tuple
from overrides import EnforceOverrides
from archai.common import common, utils
from archai.common.config import Config
from archai.supergraph.nas.arch_trainer import TArchTrainer
from archai.supergraph.nas.evaluater import EvalResult, Evaluater
from archai.supergraph.nas.finalizers import Finalizers
from archai.supergraph.nas.model_desc_builder import ModelDescBuilder
from archai.supergraph.nas.random_finalizers import RandomFinalizers
from archai.supergraph.nas.searcher import Searcher, SearchResult
class ExperimentRunner(ABC, EnforceOverrides):
def __init__(self, config_filename:str, base_name:str, clean_expdir=False) -> None:
self.config_filename = config_filename
self.base_name = base_name
self.clean_expdir = clean_expdir
def METHOD_NAME(self, conf_search:Config)->SearchResult:
model_desc_builder = self.model_desc_builder()
trainer_class = self.trainer_class()
finalizers = self.finalizers()
search = self.searcher()
return search.search(conf_search, model_desc_builder, trainer_class, finalizers)
def run_eval(self, conf_eval:Config)->EvalResult:
evaler = self.evaluater()
return evaler.evaluate(conf_eval,
model_desc_builder=self.model_desc_builder())
def run(self, search=True, eval=True) \
->Tuple[Optional[SearchResult], Optional[EvalResult]]:
search_result, eval_result = None, None
if search: # run search
conf = self._init_conf(True, clean_expdir=self.clean_expdir)
search_result = self.METHOD_NAME(conf['nas']['search'])
if eval:
conf = self.get_conf(False)
common.clean_ensure_expdir(conf, clean_dir=self.clean_expdir, ensure_dir=True)
if search:
# first copy search result to eval, otherwise we expect eval config to point to results
self.copy_search_to_eval()
conf = self._init_conf(False, clean_expdir=False)
eval_result = self.run_eval(conf['nas']['eval'])
return search_result, eval_result
def copy_search_to_eval(self)->None:
        # do not cache conf_search or conf_eval as they may have values that
        # need env var expansion.
# get desc file path that search has produced
conf_search = self.get_conf(True)['nas']['search']
search_desc_filename = conf_search['final_desc_filename']
search_desc_filepath = utils.full_path(search_desc_filename)
assert search_desc_filepath and os.path.exists(search_desc_filepath)
# get file path that eval would need
conf_eval = self.get_conf(False)['nas']['eval']
eval_desc_filename = conf_eval['final_desc_filename']
eval_desc_filepath = utils.full_path(eval_desc_filename)
assert eval_desc_filepath
utils.copy_file(search_desc_filepath, eval_desc_filepath)
def model_desc_builder(self)->Optional[ModelDescBuilder]:
return ModelDescBuilder() # default model desc builder puts nodes with no edges
def searcher(self)->Searcher:
return Searcher()
def evaluater(self)->Evaluater:
return Evaluater()
@abstractmethod
def trainer_class(self)->TArchTrainer:
pass
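    # Illustrative sketch (an assumption, not from the original file): a
    # concrete runner only has to supply a trainer class, e.g.
    #   class MyRunner(ExperimentRunner):
    #       def trainer_class(self) -> TArchTrainer:
    #           return MyArchTrainer
    #   MyRunner('conf.yaml', 'my_exp').run(search=True, eval=True)
    # MyArchTrainer and conf.yaml are hypothetical names.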
def finalizers(self)->Finalizers:
conf = common.get_conf()
finalizer = conf['nas']['search']['finalizer']
if not finalizer or finalizer == 'default':
return Finalizers()
elif finalizer == 'random':
return RandomFinalizers()
else:
raise NotImplementedError
def get_expname(self, is_search_or_eval:bool)->str:
return self.base_name + ('_search' if is_search_or_eval else '_eval')
def get_conf(self, is_search_or_eval:bool)->Config:
conf = common.create_conf(config_filepath=self.config_filename,
param_args=['--common.experiment_name', self.get_expname(is_search_or_eval)])
common.update_envvars(conf) # config paths might include env vars
return conf
def _init_conf(self, is_search_or_eval:bool, clean_expdir:bool)->Config:
config_filename = self.config_filename
conf = common.common_init(config_filepath=config_filename,
param_args=['--common.experiment_name', self.get_expname(is_search_or_eval),
], clean_expdir=clean_expdir)
return conf
| null |
1,183 | """Unit tests for the PAV backend in Annif"""
import logging
from datetime import datetime, timedelta, timezone
import py.path
import pytest
import annif.backend
import annif.corpus
from annif.exception import NotSupportedException
def test_pav_default_params(document_corpus, app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(backend_id="pav", config_params={}, project=app_project)
expected_default_params = {
"min-docs": 10,
"limit": 100,
}
actual_params = pav.params
for param, val in expected_default_params.items():
assert param in actual_params and actual_params[param] == val
def test_pav_is_not_trained(app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
assert not pav.is_trained
def test_pav_train(tmpdir, app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
tmpfile = tmpdir.join("document.tsv")
tmpfile.write(
"dummy\thttp://example.org/dummy\n"
+ "another\thttp://example.org/dummy\n"
+ "none\thttp://example.org/none"
)
document_corpus = annif.corpus.DocumentFile(str(tmpfile), app_project.subjects)
pav.train(document_corpus)
datadir = py.path.local(app_project.datadir)
assert datadir.join("pav-model-dummy-fi").exists()
assert datadir.join("pav-model-dummy-fi").size() > 0
def test_pav_train_cached(app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
with pytest.raises(NotSupportedException):
pav.train("cached")
def test_pav_train_nodocuments(app_project, empty_corpus):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
with pytest.raises(NotSupportedException) as excinfo:
pav.train(empty_corpus)
assert "training backend pav with no documents" in str(excinfo.value)
def test_pav_initialize(app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
assert pav._models is None
pav.initialize()
assert pav._models is not None
# initialize a second time - this shouldn't do anything
pav.initialize()
def METHOD_NAME(app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
results = pav.suggest(
[
"""Arkeologiaa sanotaan joskus myös
muinaistutkimukseksi tai muinaistieteeksi. Se on humanistinen tiede
tai oikeammin joukko tieteitä, jotka tutkivat ihmisen menneisyyttä.
Tutkimusta tehdään analysoimalla muinaisjäännöksiä eli niitä jälkiä,
joita ihmisten toiminta on jättänyt maaperään tai vesistöjen
pohjaan."""
]
)[0]
assert len(pav._models["dummy-fi"]) == 1
assert len(results) > 0
assert list(results)[0].score == pytest.approx(2 / 3) # PAV recalculated score
def test_pav_train_params(tmpdir, app_project, caplog):
logger = annif.logger
logger.propagate = True
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
tmpfile = tmpdir.join("document.tsv")
tmpfile.write(
"dummy\thttp://example.org/dummy\n"
+ "another\thttp://example.org/dummy\n"
+ "none\thttp://example.org/none"
)
document_corpus = annif.corpus.DocumentFile(str(tmpfile), app_project.subjects)
params = {"min-docs": 5}
with caplog.at_level(logging.DEBUG):
pav.train(document_corpus, params)
parameters_spec = "creating PAV model for source dummy-fi, min_docs=5"
assert parameters_spec in caplog.text
def test_pav_suggest_after_min_docs(app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
results = pav.suggest(
[
"""Arkeologiaa sanotaan joskus myös
muinaistutkimukseksi tai muinaistieteeksi. Se on humanistinen tiede
tai oikeammin joukko tieteitä, jotka tutkivat ihmisen menneisyyttä.
Tutkimusta tehdään analysoimalla muinaisjäännöksiä eli niitä jälkiä,
joita ihmisten toiminta on jättänyt maaperään tai vesistöjen
pohjaan."""
]
)[0]
assert len(pav._models["dummy-fi"]) == 0
assert len(results) > 0
print(results)
print(list(results)[0])
assert list(results)[0].score == 1.0 # original score from dummy-fi project
def test_pav_is_trained(app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
assert pav.is_trained
def test_pav_modification_time(app_project):
pav_type = annif.backend.get_backend("pav")
pav = pav_type(
backend_id="pav",
config_params={"limit": 50, "min-docs": 2, "sources": "dummy-fi"},
project=app_project,
)
assert datetime.now(timezone.utc) - pav.modification_time < timedelta(1) | null |
1,184 | # Copyright (c) 2016, 2020-2021 Arm Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2008-2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import re
import sys
from functools import wraps
from . import convert
from .attrdict import attrdict, multiattrdict, optiondict
from .multidict import multidict
# panic() should be called when something happens that should never
# ever happen regardless of what the user does (i.e., an actual m5
# bug).
def panic(fmt, *args):
print("panic:", fmt % args, file=sys.stderr)
sys.exit(1)
# fatal() should be called when the simulation cannot continue due to
# some condition that is the user's fault (bad configuration, invalid
# arguments, etc.) and not a simulator bug.
def fatal(fmt, *args):
print("fatal:", fmt % args, file=sys.stderr)
sys.exit(1)
# warn() should be called when the user should be warned about some condition
# that may or may not be the user's fault, but that they should be made aware
# of as it may affect the simulation or results.
def warn(fmt, *args):
print("warn:", fmt % args, file=sys.stderr)
# inform() should be called when the user should be informed about some
# condition that they may be interested in.
def inform(fmt, *args):
print("info:", fmt % args, file=sys.stdout)
def callOnce(func):
"""Decorator that enables to run a given function only once. Subsequent
calls are discarded."""
@wraps(func)
def wrapper(*args, **kwargs):
if not wrapper.has_run:
wrapper.has_run = True
return func(*args, **kwargs)
wrapper.has_run = False
return wrapper
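# Example sketch: a function decorated with callOnce executes only on the
# first call; later calls return None.
#   @callOnce
#   def greet():
#       print("hello")
#   greet()  # prints "hello"
#   greet()  # no output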
def deprecated(replacement=None, logger=warn):
"""This decorator warns the user about a deprecated function."""
def decorator(func):
@callOnce
def notifyDeprecation():
try:
func_name = lambda f: f.__module__ + "." + f.__qualname__
message = f"Function {func_name(func)} is deprecated."
if replacement:
message += f" Prefer {func_name(replacement)} instead."
except AttributeError:
message = f"Function {func} is deprecated."
if replacement:
message += f" Prefer {replacement} instead."
logger(message)
@wraps(func)
def wrapper(*args, **kwargs):
notifyDeprecation()
return func(*args, **kwargs)
return wrapper
return decorator
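# Example sketch: marking an old API and pointing users at its replacement.
#   def newFunc(): ...
#   @deprecated(replacement=newFunc)
#   def oldFunc(): ...
#   oldFunc()  # warns once: "Function <module>.oldFunc is deprecated. Prefer ..."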
class Singleton(type):
def __call__(cls, *args, **kwargs):
if hasattr(cls, "_instance"):
return cls._instance
cls._instance = super().__call__(*args, **kwargs)
return cls._instance
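# Example sketch: any class using Singleton as its metaclass shares a single
# instance across constructions.
#   class Config(metaclass=Singleton):
#       pass
#   assert Config() is Config()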
def addToPath(path):
"""Prepend given directory to system module search path. We may not
need this anymore if we can structure our config library more like a
Python package."""
# if it's a relative path and we know what directory the current
# python script is in, make the path relative to that directory.
if not os.path.isabs(path) and sys.path[0]:
path = os.path.join(sys.path[0], path)
path = os.path.realpath(path)
# sys.path[0] should always refer to the current script's directory,
# so place the new dir right after that.
sys.path.insert(1, path)
def repoPath():
"""
Return the abspath of the gem5 repository.
This is relying on the following structure:
<gem5-repo>/build/<ISA>/gem5.[opt,debug...]
"""
return os.path.dirname(os.path.dirname(os.path.dirname(sys.executable)))
# Apply method to object.
# applyMethod(obj, 'meth', <args>) is equivalent to obj.meth(<args>)
def applyMethod(obj, meth, *args, **kwargs):
return getattr(obj, meth)(*args, **kwargs)
# If the first argument is an (non-sequence) object, apply the named
# method with the given arguments. If the first argument is a
# sequence, apply the method to each element of the sequence (a la
# 'map').
def applyOrMap(objOrSeq, meth, *args, **kwargs):
if not isinstance(objOrSeq, (list, tuple)):
return applyMethod(objOrSeq, meth, *args, **kwargs)
else:
return [applyMethod(o, meth, *args, **kwargs) for o in objOrSeq]
def METHOD_NAME(items):
if len(items) == 1:
for i in items[0]:
yield (i,)
else:
for i in items[0]:
for j in METHOD_NAME(items[1:]):
yield (i,) + j
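# Example sketch: the masked generator yields the cross product of its input
# lists as tuples, e.g.
#   list(METHOD_NAME([[1, 2], ['a']])) == [(1, 'a'), (2, 'a')]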
def flatten(items):
while items:
item = items.pop(0)
if isinstance(item, (list, tuple)):
items[0:0] = item
else:
yield item
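# Example sketch: flatten consumes its input list while yielding scalars from
# arbitrarily nested lists/tuples, e.g.
#   list(flatten([1, [2, (3, 4)], 5])) == [1, 2, 3, 4, 5]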
# force scalars to one-element lists for uniformity
def makeList(objOrList):
if isinstance(objOrList, list):
return objOrList
return [objOrList]
def printList(items, indent=4):
line = " " * indent
for i, item in enumerate(items):
if len(line) + len(item) > 76:
print(line)
line = " " * indent
if i < len(items) - 1:
line += f"{item}, "
else:
line += item
print(line)
def isInteractive():
"""Check if the simulator is run interactively or in a batch environment"""
return sys.__stdin__.isatty() | null |
1,185 | # Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import os
import re
import pykube
import argparse
from azure.common.client_factory import get_client_from_auth_file, get_client_from_cli_profile
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.compute import ComputeManagementClient
RUN_ID_LABEL = 'runid'
CLOUD_REGION_LABEL = 'cloud_region'
KUBE_CONFIG_PATH = '~/.kube/config'
LOW_PRIORITY_INSTANCE_ID_TEMPLATE = '(az-[a-z0-9]{16})[0-9A-Z]{6}'
auth_file = os.environ.get('AZURE_AUTH_LOCATION', None)
if auth_file:
res_client = get_client_from_auth_file(ResourceManagementClient, auth_path=auth_file)
compute_client = get_client_from_auth_file(ComputeManagementClient, auth_path=auth_file)
else:
res_client = get_client_from_cli_profile(ResourceManagementClient)
compute_client = get_client_from_cli_profile(ComputeManagementClient)
resource_group_name = os.environ["AZURE_RESOURCE_GROUP"]
def resolve_azure_api(resource):
""" This method retrieves the latest non-preview api version for
the given resource (unless the preview version is the only available
api version) """
provider = res_client.providers.get(resource.id.split('/')[6])
rt = next((t for t in provider.resource_types
if t.resource_type.lower() == '/'.join(resource.type.split('/')[1:]).lower()), None)
if rt and 'api_versions' in rt.__dict__:
api_version = [v for v in rt.__dict__['api_versions'] if 'preview' not in v.lower()]
return api_version[0] if api_version else rt.__dict__['api_versions'][0]
def azure_resource_type_cmp(r1, r2):
if str(r1.type).split('/')[-1].lower().startswith("virtualmachine"):
return -1
elif str(r1.type).split('/')[-1].lower() == "networkinterfaces" and not str(r2.type).split('/')[-1].lower().startswith("virtualmachine"):
return -1
return 0
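# Example sketch: used with functools.cmp_to_key, this comparator sorts
# virtualMachines first, then networkInterfaces, then everything else, so
# disks and IPs are only deleted after the resources they are attached to.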
def delete_cloud_node(node_name):
low_priority_search = re.search(LOW_PRIORITY_INSTANCE_ID_TEMPLATE, node_name)
if low_priority_search:
# just because we set computer_name_prefix in nodeup script,
# we know that it is the same with scale set name, so let's extract it
scale_set_name = low_priority_search.group(1)
info = compute_client.virtual_machine_scale_sets.get(resource_group_name, scale_set_name)
else:
info = compute_client.virtual_machines.get(resource_group_name, node_name)
if info is not None and "Name" in info.tags:
resources = []
for resource in res_client.resources.list(filter="tagName eq 'Name' and tagValue eq '" + info.tags["Name"] + "'"):
resources.append(resource)
        # we need to sort resources to make sure that the VM and NIC are deleted first, because they have attached resources (disks and IPs)
resources.sort(key=functools.cmp_to_key(azure_resource_type_cmp))
for resource in resources:
res_client.resources.delete(
resource_group_name=resource.id.split('/')[4],
resource_provider_namespace=resource.id.split('/')[6],
parent_resource_path='',
resource_type=str(resource.type).split('/')[-1],
resource_name=resource.name,
api_version=resolve_azure_api(resource),
parameters=resource
).wait()
def delete_kubernetes_node(kube_api, node_name):
if node_name is not None and get_node(kube_api, node_name) is not None:
obj = {
"apiVersion": "v1",
"kind": "Node",
"metadata": {
"name": node_name
}
}
pykube.Node(kube_api, obj).delete()
def get_node(kube_api, nodename):
nodes = pykube.Node.objects(kube_api).filter(field_selector={'metadata.name': nodename})
if len(nodes.response['items']) == 0:
return None
return nodes.response['items'][0]
def get_kube_api():
try:
api = pykube.HTTPClient(pykube.KubeConfig.from_service_account())
except Exception as e:
api = pykube.HTTPClient(pykube.KubeConfig.from_file(KUBE_CONFIG_PATH))
api.session.verify = False
return api
def METHOD_NAME():
parser = argparse.ArgumentParser()
parser.add_argument("--internal_ip", "-ip", type=str, required=True)
parser.add_argument("--node_name", "-n", type=str, required=True)
args, unknown = parser.parse_known_args()
kube_api = get_kube_api()
delete_kubernetes_node(kube_api, args.node_name)
delete_cloud_node(args.node_name)
if __name__ == '__main__':
METHOD_NAME() | null |
1,186 | # Use this extension for showing layer status with three leds
import pwmio
import time
from kmk.extensions import Extension, InvalidExtensionEnvironment
from kmk.keys import make_key
class statusLED(Extension):
def __init__(
self,
led_pins,
brightness=30,
brightness_step=5,
brightness_limit=100,
):
self._leds = []
for led in led_pins:
try:
self._leds.append(pwmio.PWMOut(led))
except Exception as e:
print(e)
raise InvalidExtensionEnvironment(
                    'Unable to create pwmio.PWMOut() instance with provided led_pin'
)
self._led_count = len(self._leds)
self.brightness = brightness
self._layer_last = -1
self.brightness_step = brightness_step
self.brightness_limit = brightness_limit
make_key(names=('SLED_INC',), on_press=self._key_led_inc)
make_key(names=('SLED_DEC',), on_press=self._key_led_dec)
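    # Illustrative usage sketch (an assumption, not from the original file):
    # in a KMK keymap this extension is typically registered as
    #   keyboard.extensions.append(statusLED(led_pins=[board.GP26, board.GP27, board.GP28]))
    # where the pin names depend on the actual board.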
def _layer_indicator(self, layer_active, *args, **kwargs):
'''
Indicates layer with leds
        For the time being this is just a simple consecutive single-led
        indicator; when there are more layers than leds it wraps around
        to the first led again.
        (Also works for a single led, which simply lights when any
        layer is active)
'''
if self._layer_last != layer_active:
led_last = 0 if self._layer_last == 0 else 1 + (self._layer_last - 1) % 3
if layer_active > 0:
led_active = 0 if layer_active == 0 else 1 + (layer_active - 1) % 3
self.set_brightness(self.brightness, led_active)
self.set_brightness(0, led_last)
else:
self.set_brightness(0, led_last)
self._layer_last = layer_active
def __repr__(self):
return f'SLED({self._to_dict()})'
def _to_dict(self):
return {
'_brightness': self.brightness,
'brightness_step': self.brightness_step,
'brightness_limit': self.brightness_limit,
}
def on_runtime_enable(self, sandbox):
return
def on_runtime_disable(self, sandbox):
return
def during_bootup(self, sandbox):
'''Light up every single led once for 200 ms'''
for i in range(self._led_count + 2):
if i < self._led_count:
self._leds[i].duty_cycle = int(self.brightness / 100 * 65535)
i_off = i - 2
if i_off >= 0 and i_off < self._led_count:
self._leds[i_off].duty_cycle = int(0)
time.sleep(0.1)
for led in self._leds:
led.duty_cycle = int(0)
return
def before_matrix_scan(self, sandbox):
return
def after_matrix_scan(self, sandbox):
self._layer_indicator(sandbox.active_layers[0])
return
def before_hid_send(self, sandbox):
return
def after_hid_send(self, sandbox):
return
def on_powersave_enable(self, sandbox):
self.set_brightness(0)
return
def METHOD_NAME(self, sandbox):
        self.set_brightness(self.brightness)
self._leds[2].duty_cycle = int(50 / 100 * 65535)
time.sleep(0.2)
self._leds[2].duty_cycle = int(0)
return
def set_brightness(self, percent, layer_id=-1):
if layer_id < 0:
for led in self._leds:
led.duty_cycle = int(percent / 100 * 65535)
else:
self._leds[layer_id - 1].duty_cycle = int(percent / 100 * 65535)
    def increase_brightness(self, step=None):
        # __init__ stores the value as self.brightness, so use that attribute
        # here instead of the previously undefined self._brightness
        if not step:
            self.brightness += self.brightness_step
        else:
            self.brightness += step
        if self.brightness > 100:
            self.brightness = 100
        self.set_brightness(self.brightness, self._layer_last)
    def decrease_brightness(self, step=None):
        if not step:
            self.brightness -= self.brightness_step
        else:
            self.brightness -= step
        if self.brightness < 0:
            self.brightness = 0
        self.set_brightness(self.brightness, self._layer_last)
def _key_led_inc(self, *args, **kwargs):
self.increase_brightness()
def _key_led_dec(self, *args, **kwargs):
self.decrease_brightness() | null |
1,187 | from pathlib import Path
from typing import Callable, FrozenSet, List, Set, Tuple, Union
import numpy as np
from sc2.position import Point2
class PixelMap:
def __init__(self, proto, in_bits: bool = False):
"""
:param proto:
:param in_bits:
"""
self._proto = proto
# Used for copying pixelmaps
self._in_bits: bool = in_bits
assert self.width * self.height == (8 if in_bits else 1) * len(
self._proto.data
), f"{self.width * self.height} {(8 if in_bits else 1)*len(self._proto.data)}"
buffer_data = np.frombuffer(self._proto.data, dtype=np.uint8)
if in_bits:
buffer_data = np.unpackbits(buffer_data)
self.data_numpy = buffer_data.reshape(self._proto.size.y, self._proto.size.x)
@property
def width(self) -> int:
return self._proto.size.x
@property
def height(self) -> int:
return self._proto.size.y
@property
def bits_per_pixel(self) -> int:
return self._proto.bits_per_pixel
@property
def bytes_per_pixel(self) -> int:
return self._proto.bits_per_pixel // 8
def __getitem__(self, pos: Tuple[int, int]) -> int:
""" Example usage: is_pathable = self._game_info.pathing_grid[Point2((20, 20))] != 0 """
assert 0 <= pos[0] < self.width, f"x is {pos[0]}, self.width is {self.width}"
assert 0 <= pos[1] < self.height, f"y is {pos[1]}, self.height is {self.height}"
return int(self.data_numpy[pos[1], pos[0]])
def __setitem__(self, pos: Tuple[int, int], value: int):
""" Example usage: self._game_info.pathing_grid[Point2((20, 20))] = 255 """
assert 0 <= pos[0] < self.width, f"x is {pos[0]}, self.width is {self.width}"
assert 0 <= pos[1] < self.height, f"y is {pos[1]}, self.height is {self.height}"
assert (
0 <= value <= 254 * self._in_bits + 1
), f"value is {value}, it should be between 0 and {254 * self._in_bits + 1}"
assert isinstance(value, int), f"value is of type {type(value)}, it should be an integer"
self.data_numpy[pos[1], pos[0]] = value
def is_set(self, p: Tuple[int, int]) -> bool:
return self[p] != 0
def is_empty(self, p: Tuple[int, int]) -> bool:
return not self.is_set(p)
def copy(self) -> "PixelMap":
return PixelMap(self._proto, in_bits=self._in_bits)
def flood_fill(self, start_point: Point2, pred: Callable[[int], bool]) -> Set[Point2]:
nodes: Set[Point2] = set()
queue: List[Point2] = [start_point]
while queue:
x, y = queue.pop()
if not (0 <= x < self.width and 0 <= y < self.height):
continue
if Point2((x, y)) in nodes:
continue
if pred(self[x, y]):
nodes.add(Point2((x, y)))
queue += [Point2((x + a, y + b)) for a in [-1, 0, 1] for b in [-1, 0, 1] if not (a == 0 and b == 0)]
return nodes
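    # Example sketch: collect the connected region of non-zero cells around a
    # start point (pred receives the raw pixel value), e.g.
    #   region = pixel_map.flood_fill(Point2((10, 10)), lambda v: v != 0)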
def METHOD_NAME(self, pred: Callable[[int], bool]) -> Set[FrozenSet[Point2]]:
groups: Set[FrozenSet[Point2]] = set()
for x in range(self.width):
for y in range(self.height):
if any((x, y) in g for g in groups):
continue
if pred(self[x, y]):
groups.add(frozenset(self.flood_fill(Point2((x, y)), pred)))
return groups
def print(self, wide: bool = False) -> None:
for y in range(self.height):
for x in range(self.width):
print("#" if self.is_set((x, y)) else " ", end=(" " if wide else ""))
print("")
def save_image(self, filename: Union[str, Path]):
data = [(0, 0, self[x, y]) for y in range(self.height) for x in range(self.width)]
# pylint: disable=C0415
from PIL import Image
im = Image.new("RGB", (self.width, self.height))
im.putdata(data) # type: ignore
im.save(filename)
def plot(self):
# pylint: disable=C0415
import matplotlib.pyplot as plt
plt.imshow(self.data_numpy, origin="lower")
plt.show() | null |
1,188 | import logging
from framework.celery_tasks.handlers import enqueue_task
from website import settings
logger = logging.getLogger(__name__)
if settings.SEARCH_ENGINE == 'elastic':
import website.search.elastic_search as search_engine
else:
search_engine = None
    logger.warning('Elastic search is not set to load')
def requires_search(func):
def wrapped(*args, **kwargs):
if search_engine is not None and not settings.RUNNING_MIGRATION:
return func(*args, **kwargs)
return wrapped
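# Illustrative sketch (an assumption, not from the original file): the
# decorator turns a search call into a silent no-op when no engine is loaded:
#   @requires_search
#   def ping():
#       return search_engine.ping()
#   ping()  # returns None if search_engine is None or a migration is running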
@requires_search
def search(query, index=None, doc_type=None, raw=None):
index = index or settings.ELASTIC_INDEX
return search_engine.search(query, index=index, doc_type=doc_type, raw=raw)
@requires_search
def METHOD_NAME(node, index=None, bulk=False, async_update=True, saved_fields=None):
kwargs = {
'index': index,
'bulk': bulk
}
if async_update:
node_id = node._id
# We need the transaction to be committed before trying to run celery tasks.
# For example, when updating a Node's privacy, is_public must be True in the
        # database in order for the method that updates the Node's elastic search
# to run correctly.
if settings.USE_CELERY:
enqueue_task(search_engine.update_node_async.s(node_id=node_id, **kwargs))
else:
search_engine.update_node_async(node_id=node_id, **kwargs)
else:
index = index or settings.ELASTIC_INDEX
return search_engine.METHOD_NAME(node, **kwargs)
@requires_search
def update_preprint(preprint, index=None, bulk=False, async_update=True, saved_fields=None):
kwargs = {
'index': index,
'bulk': bulk
}
if async_update:
preprint_id = preprint._id
# We need the transaction to be committed before trying to run celery tasks.
if settings.USE_CELERY:
enqueue_task(search_engine.update_preprint_async.s(preprint_id=preprint_id, **kwargs))
else:
search_engine.update_preprint_async(preprint_id=preprint_id, **kwargs)
else:
index = index or settings.ELASTIC_INDEX
return search_engine.update_preprint(preprint, **kwargs)
@requires_search
def update_group(group, index=None, bulk=False, async_update=True, saved_fields=None, deleted_id=None):
kwargs = {
'index': index,
'bulk': bulk,
'deleted_id': deleted_id
}
if async_update:
# We need the transaction to be committed before trying to run celery tasks.
if settings.USE_CELERY:
enqueue_task(search_engine.update_group_async.s(group_id=group._id, **kwargs))
else:
search_engine.update_group_async(group_id=group._id, **kwargs)
else:
index = index or settings.ELASTIC_INDEX
return search_engine.update_group(group, **kwargs)
@requires_search
def bulk_update_nodes(serialize, nodes, index=None, category=None):
index = index or settings.ELASTIC_INDEX
search_engine.bulk_update_nodes(serialize, nodes, index=index, category=category)
@requires_search
def delete_node(node, index=None):
index = index or settings.ELASTIC_INDEX
doc_type = node.project_or_component
if node.is_registration:
doc_type = 'registration'
search_engine.delete_doc(node._id, node, index=index, category=doc_type)
@requires_search
def update_contributors_async(user_id):
"""Async version of update_contributors above"""
if settings.USE_CELERY:
enqueue_task(search_engine.update_contributors_async.s(user_id))
else:
search_engine.update_contributors_async(user_id)
@requires_search
def update_user(user, index=None, async_update=True):
index = index or settings.ELASTIC_INDEX
if async_update:
user_id = user.id
if settings.USE_CELERY:
enqueue_task(search_engine.update_user_async.s(user_id, index=index))
else:
search_engine.update_user_async(user_id, index=index)
else:
search_engine.update_user(user, index=index)
@requires_search
def update_file(file_, index=None, delete=False):
index = index or settings.ELASTIC_INDEX
search_engine.update_file(file_, index=index, delete=delete)
@requires_search
def update_institution(institution, index=None):
index = index or settings.ELASTIC_INDEX
search_engine.update_institution(institution, index=index)
@requires_search
def update_collected_metadata(node_id, collection_id=None, index=None, op='update'):
index = index or settings.ELASTIC_INDEX
if settings.USE_CELERY:
enqueue_task(
search_engine.update_collection_submission_async.s(
node_id,
collection_id=collection_id,
op=op,
index=index
)
)
else:
search_engine.update_collection_submission_async(
node_id,
collection_id=collection_id,
op=op,
index=index
)
@requires_search
def bulk_update_collection_submissions(collection_submissions, op='update', index=None):
index = index or settings.ELASTIC_INDEX
search_engine.bulk_update_collection_submission(collection_submissions, op=op, index=index)
@requires_search
def delete_all():
search_engine.delete_all()
@requires_search
def delete_index(index):
search_engine.delete_index(index)
@requires_search
def create_index(index=None):
index = index or settings.ELASTIC_INDEX
search_engine.create_index(index=index)
@requires_search
def search_contributor(query, page=0, size=10, exclude=None, current_user=None):
exclude = exclude or []
result = search_engine.search_contributor(query=query, page=page, size=size,
exclude=exclude, current_user=current_user)
return result | null |
1,189 | import logging
from ..base.twilltestcase import (
common,
ShedTwillTestCase,
)
log = logging.getLogger(__name__)
repository_name = "filtering_1470"
repository_description = "Galaxy's filtering tool"
repository_long_description = "Long description of Galaxy's filtering tool"
category_name = "Test 1470 - Updating Installed Repositories"
category_description = (
"Functional test suite to ensure that updating installed repositories does not create white ghosts."
)
"""
1. Install a repository into Galaxy.
2. In the Tool Shed, update the repository from Step 1.
3. In Galaxy, get updates to the repository.
4. In Galaxy, uninstall the repository.
5. In Galaxy, reinstall the repository.
6. Make sure step 5 created no white ghosts.
"""
class TestUpdateInstalledRepository(ShedTwillTestCase):
"""Verify that the code correctly handles updating an installed repository, then uninstalling and reinstalling."""
def test_0000_initiate_users(self):
"""Create necessary user accounts."""
self.login(email=common.test_user_1_email, username=common.test_user_1_name)
self.login(email=common.admin_email, username=common.admin_username)
def test_0005_create_filtering_repository(self):
"""Create and populate the filtering_0530 repository."""
category = self.create_category(name=category_name, description=category_description)
self.login(email=common.test_user_1_email, username=common.test_user_1_name)
repository = self.get_or_create_repository(
name=repository_name,
description=repository_description,
long_description=repository_long_description,
owner=common.test_user_1_name,
category=category,
strings_displayed=[],
)
self.upload_file(
repository,
filename="filtering/filtering_1.1.0.tar",
filepath=None,
valid_tools_only=True,
uncompress_file=True,
remove_repo_files_not_in_tar=False,
commit_message="Uploaded filtering 1.1.0 tarball.",
strings_displayed=[],
strings_not_displayed=[],
)
def test_0010_install_filtering_to_galaxy(self):
"""Install the filtering_1470 repository to galaxy.
This is step 1 - Install a repository into Galaxy.
"""
self.galaxy_login(email=common.admin_email, username=common.admin_username)
self._install_repository(
repository_name,
common.test_user_1_name,
category_name,
install_tool_dependencies=False,
install_repository_dependencies=False,
new_tool_panel_section_label="Filtering",
)
self._assert_has_installed_repos_with_names("filtering_1470")
def test_0015_update_repository(self):
"""Upload a readme file to the filtering_1470 repository.
This is step 2 - In the Tool Shed, update the repository from Step 1.
Importantly, this update should *not* create a new installable changeset revision, because that would
eliminate the process we're testing in this script. So, we upload a readme file.
"""
self.login(email=common.test_user_1_email, username=common.test_user_1_name)
repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
self.upload_file(
repository,
filename="filtering/readme.txt",
filepath=None,
valid_tools_only=True,
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message="Uploaded readme.",
strings_displayed=[],
strings_not_displayed=[],
)
def test_0020_get_repository_updates(self):
"""Get updates to the installed repository.
This is step 3 - In Galaxy, get updates to the repository.
"""
self.galaxy_login(email=common.admin_email, username=common.admin_username)
installed_repository = self.test_db_util.get_installed_repository_by_name_owner(
repository_name, common.test_user_1_name
)
self.update_installed_repository_api(installed_repository)
def METHOD_NAME(self):
"""Uninstall the filtering_1470 repository.
This is step 4 - In Galaxy, uninstall the repository.
"""
installed_repository = self.test_db_util.get_installed_repository_by_name_owner(
repository_name, common.test_user_1_name
)
self.uninstall_repository(installed_repository)
def test_0030_reinstall_repository(self):
"""Reinstall the filtering_1470 repository.
This is step 5 - In Galaxy, reinstall the repository.
"""
installed_repository = self.test_db_util.get_installed_repository_by_name_owner(
repository_name, common.test_user_1_name
)
self.reinstall_repository_api(installed_repository)
def test_0035_verify_absence_of_ghosts(self):
"""Check the count of repositories in the database named filtering_1470 and owned by user1.
This is step 6 - Make sure step 5 created no white ghosts.
"""
installed_repository = self.test_db_util.get_installed_repository_by_name_owner(
repository_name, common.test_user_1_name, return_multiple=True
)
assert (
len(installed_repository) == 1
), 'Multiple filtering repositories found in the Galaxy database, possibly indicating a "white ghost" scenario.' | null |
1,190 | # Drakkar-Software OctoBot-Interfaces
# Copyright (c) Drakkar-Software, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import flask
import octobot_commons.logging as commons_logging
import octobot_commons.authentication as authentication
import tentacles.Services.Interfaces.web_interface.util as util
import tentacles.Services.Interfaces.web_interface.login as login
import tentacles.Services.Interfaces.web_interface.models as models
import tentacles.Services.Interfaces.web_interface.flask_util as flask_util
import octobot.automation as bot_automation
import octobot.constants as constants
def register(blueprint):
@blueprint.route("/automations", methods=["POST", "GET"])
@login.login_required_when_activated
def automations():
if not models.are_automations_enabled():
return flask.redirect(flask.url_for("home"))
if flask.request.method == 'POST':
action = flask.request.args.get("action")
success = True
response = ""
tentacle_name = bot_automation.Automation.get_name()
tentacle_class = bot_automation.Automation
restart = False
if action == "save":
request_data = flask.request.get_json()
success, response = models.update_tentacle_config(
tentacle_name,
request_data,
tentacle_class=tentacle_class
)
if action == "start":
restart = True
elif action == "factory_reset":
success, response = models.reset_automation_config_to_default()
restart = True
if restart:
models.restart_global_automations()
if success:
return util.get_rest_reply(flask.jsonify(response))
else:
return util.get_rest_reply(response, 500)
display_intro = flask_util.BrowsingDataProvider.instance().get_and_unset_is_first_display(
flask_util.BrowsingDataProvider.AUTOMATIONS
)
all_events, all_conditions, all_actions = models.get_all_automation_steps()
form_to_display = constants.AUTOMATION_FEEDBACK_FORM_ID
try:
user_id = models.get_user_account_id()
display_feedback_form = models.has_at_least_one_running_automation() and not models.has_filled_form(form_to_display)
except authentication.AuthenticationRequired:
# no authenticated user: don't display form
user_id = None
display_feedback_form = False
return flask.render_template(
'automations.html',
profile_name=models.get_current_profile().name,
events=all_events,
conditions=all_conditions,
actions=all_actions,
display_intro=display_intro,
user_id=user_id,
form_to_display=form_to_display,
display_feedback_form=display_feedback_form,
)
@blueprint.route('/automations_edit_details')
@login.login_required_when_activated
def METHOD_NAME():
if not models.are_automations_enabled():
return flask.redirect(flask.url_for("home"))
try:
return util.get_rest_reply(
models.get_tentacle_config_and_edit_display(
bot_automation.Automation.get_name(),
tentacle_class=bot_automation.Automation
)
)
except Exception as e:
commons_logging.get_logger("automations_edit_details").exception(e)
return util.get_rest_reply(str(e), 500) | null |
1,191 | """
NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
from logging import getLogger
from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
log = getLogger(__name__)
class NTLMConnectionPool(HTTPSConnectionPool):
"""
Implements an NTLM authentication version of an urllib3 connection pool
"""
scheme = 'https'
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
authurl is a random URL on the server that is protected by NTLM.
user is the Windows user, probably in the DOMAIN\\username format.
pw is the password for the user.
"""
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
user_parts = user.split('\\', 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
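    # Illustrative usage sketch (an assumption, not from the original file):
    #   pool = NTLMConnectionPool('MYDOMAIN\\alice', 's3cret',
    #                             authurl='/protected/', host='example.com')
    #   resp = pool.urlopen('GET', '/protected/page')
    # Credentials, host, and paths above are placeholders.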
def METHOD_NAME(self):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
self.num_connections, self.host, self.authurl)
headers = {'Connection': 'Keep-Alive'}
req_header = 'Authorization'
resp_header = 'www-authenticate'
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
headers[req_header] = (
'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', reshdr)
log.debug('Response data: %s [...]', res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
auth_header_values = reshdr[resp_header].split(', ')
auth_header_value = None
for s in auth_header_values:
if s[:5] == 'NTLM ':
auth_header_value = s[5:]
if auth_header_value is None:
raise Exception('Unexpected %s response header: %s' %
(resp_header, reshdr[resp_header]))
# Send authentication message
ServerChallenge, NegotiateFlags = \
ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
self.user,
self.domain,
self.pw,
NegotiateFlags)
headers[req_header] = 'NTLM %s' % auth_msg
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', dict(res.getheaders()))
log.debug('Response data: %s [...]', res.read()[:100])
if res.status != 200:
if res.status == 401:
raise Exception('Server rejected request: wrong '
'username or password')
raise Exception('Wrong server response: %s %s' %
(res.status, res.reason))
res.fp = None
log.debug('Connection established')
return conn
def urlopen(self, method, url, body=None, headers=None, retries=3,
redirect=True, assert_same_host=True):
if headers is None:
headers = {}
headers['Connection'] = 'Keep-Alive'
return super(NTLMConnectionPool, self).urlopen(method, url, body,
headers, retries,
redirect,
assert_same_host) | null |
1,192 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkclickhouse.endpoint import endpoint_data
class CreateDBInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'clickhouse', '2019-11-11', 'CreateDBInstance')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DBClusterDescription(self): # String
return self.get_query_params().get('DBClusterDescription')
def set_DBClusterDescription(self, DBClusterDescription): # String
self.add_query_param('DBClusterDescription', DBClusterDescription)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_SourceDBClusterId(self): # String
return self.get_query_params().get('SourceDBClusterId')
def set_SourceDBClusterId(self, SourceDBClusterId): # String
self.add_query_param('SourceDBClusterId', SourceDBClusterId)
def get_DbNodeStorageType(self): # String
return self.get_query_params().get('DbNodeStorageType')
def set_DbNodeStorageType(self, DbNodeStorageType): # String
self.add_query_param('DbNodeStorageType', DbNodeStorageType)
def get_DBClusterCategory(self): # String
return self.get_query_params().get('DBClusterCategory')
def set_DBClusterCategory(self, DBClusterCategory): # String
self.add_query_param('DBClusterCategory', DBClusterCategory)
def get_EncryptionType(self): # String
return self.get_query_params().get('EncryptionType')
def set_EncryptionType(self, EncryptionType): # String
self.add_query_param('EncryptionType', EncryptionType)
def get_DBClusterNetworkType(self): # String
return self.get_query_params().get('DBClusterNetworkType')
def set_DBClusterNetworkType(self, DBClusterNetworkType): # String
self.add_query_param('DBClusterNetworkType', DBClusterNetworkType)
def get_Period(self): # String
return self.get_query_params().get('Period')
def set_Period(self, Period): # String
self.add_query_param('Period', Period)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_DBClusterVersion(self): # String
return self.get_query_params().get('DBClusterVersion')
def set_DBClusterVersion(self, DBClusterVersion): # String
self.add_query_param('DBClusterVersion', DBClusterVersion)
def get_DBClusterClass(self): # String
return self.get_query_params().get('DBClusterClass')
def set_DBClusterClass(self, DBClusterClass): # String
self.add_query_param('DBClusterClass', DBClusterClass)
def get_BackupSetID(self): # String
return self.get_query_params().get('BackupSetID')
def set_BackupSetID(self, BackupSetID): # String
self.add_query_param('BackupSetID', BackupSetID)
def get_EncryptionKey(self): # String
return self.get_query_params().get('EncryptionKey')
def set_EncryptionKey(self, EncryptionKey): # String
self.add_query_param('EncryptionKey', EncryptionKey)
def METHOD_NAME(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBNodeGroupCount(self): # String
return self.get_query_params().get('DBNodeGroupCount')
def set_DBNodeGroupCount(self, DBNodeGroupCount): # String
self.add_query_param('DBNodeGroupCount', DBNodeGroupCount)
def get_UsedTime(self): # String
return self.get_query_params().get('UsedTime')
def set_UsedTime(self, UsedTime): # String
self.add_query_param('UsedTime', UsedTime)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_DBNodeStorage(self): # String
return self.get_query_params().get('DBNodeStorage')
def set_DBNodeStorage(self, DBNodeStorage): # String
self.add_query_param('DBNodeStorage', DBNodeStorage)
def get_VPCId(self): # String
return self.get_query_params().get('VPCId')
def set_VPCId(self, VPCId): # String
self.add_query_param('VPCId', VPCId)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType) | null |
1,193 | import pytest
from lark.exceptions import UnexpectedToken
from pharmpy.model import ModelSyntaxError
def test_data_filename_get(parser):
record = parser.parse('$DATA "pheno.dta"').records[0]
assert record.filename == 'pheno.dta'
record = parser.parse('$DATA /home/full/pheno.dta').records[0]
assert record.filename == '/home/full/pheno.dta'
record = parser.parse("$DATA 'pheno.dta'").records[0]
assert str(record.root.subtree('filename')) == "'pheno.dta'"
assert record.filename == "pheno.dta"
record = parser.parse(r'$DATA "C:\windowspath\with space in.csv"').records[0]
assert record.filename == r'C:\windowspath\with space in.csv'
record = parser.parse('$DATA \n pheno.dta \n; comment\n').records[0]
assert record.filename == 'pheno.dta'
record = parser.parse('$DATA ; comment\n ; some comment line\n pheno.dta\n\n').records[0]
assert record.filename == 'pheno.dta'
def METHOD_NAME(parser):
record = parser.parse('$DATA DUMMY ; comment').records[0]
assert record.filename == 'DUMMY'
assert str(record) == '$DATA DUMMY ; comment'
# simple replace
record = record.set_filename('/new/path/to_file.txt')
assert record.filename == '/new/path/to_file.txt'
assert str(record) == '$DATA /new/path/to_file.txt ; comment'
# force quoting
record = record.set_filename('MUST=QUOTE')
assert record.filename == 'MUST=QUOTE'
assert str(record) == "$DATA 'MUST=QUOTE' ; comment"
# more complex example
text = '$DATA ; comment\n ; some comment line\n pheno.dta\n\n'
record = parser.parse(text).records[0]
assert record.filename == 'pheno.dta'
assert str(record) == text
# more complex replace
record = record.set_filename("'IGNORE'")
assert record.filename == "'IGNORE'"
assert str(record) == text.replace('pheno.dta', '"\'IGNORE\'"')
# *
record = parser.parse('$DATA DUMMY ; comment').records[0]
record = record.set_filename(None)
assert str(record) == '$DATA * ; comment'
def test_option_record(parser):
record = parser.parse('$DATA pheno.dta NOWIDE').records[0]
assert record.option_pairs == {'NOWIDE': None}
def test_ignore_character(parser):
record = parser.parse('$DATA pheno.dta').records[0]
assert record.ignore_character is None
record = record.set_ignore_character('I')
assert record.ignore_character == 'I'
record = parser.parse('$DATA pheno.dta IGNORE=@').records[0]
assert record.filename == 'pheno.dta'
assert record.ignore_character == '@'
record = record.set_ignore_character('K')
assert record.ignore_character == 'K'
record = parser.parse('$DATA pheno.dta IGNORE="I"').records[0]
assert record.ignore_character == 'I'
record = parser.parse('$DATA pheno.dta IGNORE=\'"\'').records[0]
assert record.ignore_character == '"'
record = parser.parse('$DATA pheno.dta IGNORE=K IGNORE=(ID.EQ.2)').records[0]
assert record.ignore_character == 'K'
record = parser.parse('$DATA pheno.dta IGNORE=(DV==3) IGNORE=C').records[0]
assert record.ignore_character == 'C'
record = record.set_ignore_character('@')
assert record.ignore_character == '@'
assert str(record.ignore[0]) == 'DV==3'
record = parser.parse('$DATA pheno.dta IGNORE=,').records[0]
assert record.ignore_character == ','
record = parser.parse('$DATA pheno.dta IGNORE="').records[0]
assert record.ignore_character == '"'
record = record.set_ignore_character('"')
assert record.ignore_character == '"'
assert str(record) == '$DATA pheno.dta IGNORE="'
with pytest.raises(UnexpectedToken):
record = parser.parse('$DATA pheno.dta IGNORE=""').records[0]
record = parser.parse('$DATA pheno.dta IGNORE=c IGNORE=@').records[0]
with pytest.raises(ModelSyntaxError):
record.ignore_character
def test_ignore_character_from_header(parser):
record = parser.parse('$DATA pheno.dta').records[0]
assert record.ignore_character is None
record = record.set_ignore_character_from_header("ID")
assert record.ignore_character == '@'
record = record.set_ignore_character_from_header("_ID")
assert record.ignore_character == '_'
def test_null_value(parser):
record = parser.parse('$DATA pheno.dta NULL=1').records[0]
assert record.null_value == 1
record = parser.parse('$DATA pheno.dta NULL=+').records[0]
assert record.null_value == 0
def test_ignore_accept(parser):
record = parser.parse('$DATA pheno.dta IGNORE=(DV.EQ.1)').records[0]
assert str(record.ignore[0]) == 'DV.EQ.1'
assert record.accept == []
record = record.remove_ignore()
assert record.ignore == []
assert record.accept == []
record = parser.parse('$DATA pheno.dta ACCEPT=(DV.EQ.1, MDV.NEN.23)').records[0]
assert str(record.accept[0]) == 'DV.EQ.1'
assert str(record.accept[1]) == 'MDV.NEN.23'
assert record.ignore == []
record = record.remove_accept()
assert record.ignore == []
assert record.accept == []
record = parser.parse('$DATA pheno.dta IGNORE=(WGT < 1 ,\n ID\n.EQ."lk")').records[0]
    assert str(record.ignore[0]) == 'WGT < 1'
    assert str(record.ignore[1]) == 'ID\n.EQ."lk"'
record = parser.parse('$DATA ../pheno.dta IGNORE=@ IGNORE(APGR.GT.23)\n').records[0]
record = record.remove_ignore().remove_accept()
assert str(record) == '$DATA ../pheno.dta IGNORE=@ \n'
def test_comments(parser):
record = parser.parse('$DATA pheno.dta IGNORE=@;MYCOMMENT').records[0]
assert str(record) == '$DATA pheno.dta IGNORE=@;MYCOMMENT'
def test_data_infile(parser):
record = parser.parse('$INFILE pheno.dta').records[0]
assert record.name == 'DATA'
assert record.filename == 'pheno.dta'
assert record.raw_name == '$INFILE'
def test_comment(parser):
contents = r"""$DATA cpt7.dta IGNORE= #
; Dataset
"""
record = parser.parse(contents).records[0]
record = record.set_ignore_character("A")
assert str(record) == '$DATA cpt7.dta \n; Dataset\nIGNORE=A\n' | null |
1,194 | """Use the Python pygments library to perform extra checks on C++ grammar."""
from pygments import token
from pygments.lexers.compiled import CppLexer
import os
def check_header_file(fh_name, project_name, errors):
"""Check a single C++ header file"""
METHOD_NAME(fh_name, project_name, True, errors)
def check_cpp_file(fh_name, project_name, errors):
"""Check a single C++ source file"""
METHOD_NAME(fh_name, project_name, False, errors)
def METHOD_NAME(fh_name, project_name, header, errors):
fh, filename = fh_name
s = tokenize_file(fh)
check_tokens(s, filename, project_name, header, errors)
def tokenize_file(fh):
"""Use the Python pygments library to tokenize a C++ file"""
code = fh.read()
c = CppLexer()
scan = []
for (index, tok, value) in c.get_tokens_unprocessed(code):
scan.append((tok, value))
return scan
def check_tokens(scan, filename, project_name, header, errors):
if filename.find("test_") == -1:
        # skip the comment-header check for test files
check_comment_header(scan, filename, errors)
if header:
# Handle older versions of pygments which concatenate \n and # tokens
if len(scan) >= 3 and scan[2][0] == token.Comment.Preproc \
and scan[2][1] == '\n#':
scan[2] = (token.Comment.Preproc, '#')
scan.insert(2, (token.Comment.Text, '\n'))
check_header_start_end(scan, filename, project_name, errors)
def check_comment_header(scan, filename, errors):
if len(scan) < 1 or scan[0][0] not in (token.Comment,
token.Comment.Multiline):
errors.append('%s:1: First line should be a comment with a copyright '
'notice and a description of the file' % filename)
def have_header_guard(scan):
return len(scan) >= 11 \
and scan[4][0] == token.Comment.Preproc \
and scan[4][1].startswith('ifndef') \
and scan[7][0] == token.Comment.Preproc \
and scan[7][1].startswith('define') \
and scan[-3][0] == token.Comment.Preproc \
and scan[-3][1].startswith('endif') \
and scan[-2][0] in (token.Comment, token.Comment.Multiline)
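# Index layout assumed by the guard checks (after the pygments normalization
# in check_tokens): scan[0] is the file comment, scan[4] the '#ifndef GUARD'
# preprocessor token, scan[7] the '#define GUARD', scan[-3] the '#endif',
# and scan[-2] the closing '/* GUARD */' comment.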
def get_header_guard(filename, project_name):
"""Get prefix and suffix for header guard"""
guard_prefix = project_name.replace(".", "").upper()
guard_suffix = os.path.split(filename)[1].replace(".", "_").upper()
return guard_prefix, guard_suffix
def check_header_start_end(scan, filename, project_name, errors):
guard_prefix, guard_suffix = get_header_guard(filename, project_name)
header_guard = guard_prefix + '_' + guard_suffix
if len(scan) < 11:
bad = True
else:
bad = False
if not scan[4][0] == token.Comment.Preproc:
bad = True
if not scan[4][1].startswith('ifndef'):
errors.append('%s:%d: Header guard missing #ifndef.'
% (filename, 1))
bad = True
if not scan[7][0] == token.Comment.Preproc:
bad = True
if not scan[7][1].startswith('define'):
errors.append('%s:%d: Header guard missing #define.'
% (filename, 1))
bad = True
if not scan[-3][0] == token.Comment.Preproc \
and not scan[-4][0] == token.Comment.Preproc:
bad = True
if not scan[-3][1].startswith('endif') \
and not scan[-4][1].startswith('endif'):
errors.append('%s:%d: Header guard missing #endif.'
% (filename, 1))
bad = True
if not scan[-2][0] in (token.Comment, token.Comment.Multiline) \
and not scan[-3][0] in (token.Comment, token.Comment.Multiline):
errors.append('%s:%d: Header guard missing closing comment.'
% (filename, 1))
bad = True
guard = scan[4][1][7:]
if not guard.startswith(guard_prefix):
errors.append('%s:%d: Header guard does not start with "%s".'
% (filename, 1, guard_prefix))
bad = True
if not guard.replace("_", "").endswith(guard_suffix.replace("_", "")):
errors.append('%s:%d: Header guard does not end with "%s".'
% (filename, 1, guard_suffix))
bad = True
if not scan[7][1] == 'define ' + guard:
errors.append('%s:%d: Header guard does not define "%s".'
% (filename, 1, guard))
bad = True
if not scan[-2][1] == '/* %s */' % guard \
and not scan[-3][1] == '/* %s */' % guard:
errors.append('%s:%d: Header guard close does not have a '
'comment of "/* %s */".' % (filename, 1, guard))
bad = True
if bad:
errors.append('%s:%d: Missing or incomplete header guard.'
% (filename, 1) + """
Header files should start with a comment, then a blank line, then the rest
of the file wrapped with a header guard. This must start with %s
and end with %s - in between can be placed extra qualifiers, e.g. for a
namespace. For example,
/** Copyright and file description */
#ifndef %s
#define %s
...
#endif /* %s */
""" % (guard_prefix, guard_suffix, header_guard, header_guard, header_guard)) | null |
1,195 | # -*- coding: utf-8 -*-
from vi import utils
from vi.widgets import ListWidget, EditWidget
from vi.priorityqueue import actionDelegateSelector, ModuleWidgetSelector
from flare.i18n import translate
from vi.config import conf
from vi.pane import Pane
from flare.button import Button
class ContextAction(Button):
def __init__(self, module, handler, actionName, *args, **kwargs):
dsc = actionName.split(".", 3)
assert dsc[0] == "context", u"Invalid definition!"
mod = dsc[1]
vars = dsc[2].split(",")
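        # actionName format: "context.<module>.<var,...>" - e.g. the
        # hypothetical "context.order.user=$creator,status" targets the
        # "order" module with variables ["user=$creator", "status"].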
        assert mod in conf["modules"], "The module '%s' must provide an adminInfo when run in a context action" % mod
adminInfo = conf["modules"][mod]
if "name" in adminInfo:
title = adminInfo["name"]
else:
title = mod
icon = adminInfo.get("icon")
super(ContextAction, self).__init__(text=title, icon=icon)
self.widget = None
self.adminInfo = adminInfo
self.contextModule = mod
self.contextVariables = vars
self.title = title
        self.filter = filter  # FIXME: no filter argument is passed to __init__, so this binds the builtin filter()
self.icon = icon
self.addClass("context-%s" % self.contextModule)
self["class"].extend(["bar-item","btn--small"])
self.disable()
def onAttach(self):
super(ContextAction, self).onAttach()
self.widget = self.parent().parent()
if isinstance(self.widget, ListWidget):
self.widget.selectionChangedEvent.register(self)
elif isinstance(self.widget, EditWidget) and self.widget.mode == "edit":
self.enable()
def onDetach(self):
if isinstance(self.widget, ListWidget):
self.widget.selectionChangedEvent.unregister(self)
super(ContextAction, self).onDetach()
def METHOD_NAME(self, table, selection, *args, **kwargs):
if len(selection) > 0:
self.enable()
else:
self.disable()
def onClick(self, sender=None):
assert self.widget, u"This action must be attached first!"
if isinstance(self.widget, ListWidget):
for s in self.widget.getCurrentSelection():
self.openModule(s)
elif isinstance(self.widget, EditWidget):
d = self.widget.serializeForDocument()
self.openModule(d)
def openModule(self, data, title=None):
# Generate title
if title is None:
for key in conf["vi.context.title.bones"]:
if title := data.get(key):
if isinstance(title, dict) and conf["flare.language.current"] in title:
title = title[conf["flare.language.current"]]
break
# Merge contexts
context = {}
context.update(self.widget.context or {})
context.update(self.adminInfo.get("context", {}))
# Evaluate context variables
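        # Each entry is either "key=value" (a "$name" value is looked up in
        # the current entry's data) or a bare name, which maps that name to
        # the entry's "key" field - e.g. "owner=$creator" or "parentrepo".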
for var in self.contextVariables:
if "=" in var:
key, value = var.split("=", 1)
if value[0] == "$":
value = data.get(value[1:])
else:
key = var
value = data.get("key")
context[key] = value
# Open a new view for the context module
conf["mainWindow"].openView(
translate("{{module}} - {{name}}", module=self.title, name=title),
self.adminInfo.get("icon") or "icon-edit",
self.contextModule + self.adminInfo["handler"],
self.contextModule,
None, # is not used...
data=utils.mergeDict(self.adminInfo, {"context": context}),
target="popup" if self.parent().parent().isSelector else "mainNav"
)
# OLD VERSION OPENS THE HANDLER DIRECTLY IN A POPUP.
# # Have a handler?
# assert (widgen := ModuleWidgetSelector.select(self.contextModule, self.adminInfo))
#
# #print(widgen, context, utils.mergeDict(self.adminInfo, {"context": context}))
# widget = widgen(self.contextModule, **utils.mergeDict(self.adminInfo, {"context": context}))
#
# if widget:
# widget.isSelector = True # this is done so that subsequent views are stacked in Popups...
#
# conf["mainWindow"].stackWidget(
# widget,
# title=translate("{{module}} - {{name}}", module=self.title, name=title),
# icon=self.adminInfo.get("icon")
# )
#
# else:
# print("Widget could not be generated")
@staticmethod
def isSuitableFor(module, handler, actionName):
if module is None or module not in conf["modules"].keys():
return False
if not actionName.startswith("context."):
return False
mod = actionName.split(".", 3)[1]
cuser = conf["currentUser"]
return "root" in cuser["access"] or ("%s-view" % mod) in cuser["access"]
actionDelegateSelector.insert(1, ContextAction.isSuitableFor, ContextAction) | null |
1,196 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
#
#
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkimm.endpoint import endpoint_data
import json
class GenerateWebofficeTokenRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'imm', '2020-09-30', 'GenerateWebofficeToken','imm')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Referer(self): # String
return self.get_query_params().get('Referer')
def set_Referer(self, Referer): # String
self.add_query_param('Referer', Referer)
def get_Notification(self): # Struct
return self.get_query_params().get('Notification')
def METHOD_NAME(self, Notification): # Struct
self.add_query_param("Notification", json.dumps(Notification))
def get_Password(self): # String
return self.get_query_params().get('Password')
def set_Password(self, Password): # String
self.add_query_param('Password', Password)
def get_ProjectName(self): # String
return self.get_query_params().get('ProjectName')
def set_ProjectName(self, ProjectName): # String
self.add_query_param('ProjectName', ProjectName)
def get_Watermark(self): # Struct
return self.get_query_params().get('Watermark')
def set_Watermark(self, Watermark): # Struct
self.add_query_param("Watermark", json.dumps(Watermark))
def get_NotifyTopicName(self): # String
return self.get_query_params().get('NotifyTopicName')
def set_NotifyTopicName(self, NotifyTopicName): # String
self.add_query_param('NotifyTopicName', NotifyTopicName)
def get_Filename(self): # String
return self.get_query_params().get('Filename')
def set_Filename(self, Filename): # String
self.add_query_param('Filename', Filename)
def get_SourceURI(self): # String
return self.get_query_params().get('SourceURI')
def set_SourceURI(self, SourceURI): # String
self.add_query_param('SourceURI', SourceURI)
def get_ExternalUploaded(self): # Boolean
return self.get_query_params().get('ExternalUploaded')
def set_ExternalUploaded(self, ExternalUploaded): # Boolean
self.add_query_param('ExternalUploaded', ExternalUploaded)
def get_UserData(self): # String
return self.get_query_params().get('UserData')
def set_UserData(self, UserData): # String
self.add_query_param('UserData', UserData)
def get_PreviewPages(self): # Long
return self.get_query_params().get('PreviewPages')
def set_PreviewPages(self, PreviewPages): # Long
self.add_query_param('PreviewPages', PreviewPages)
def get_Hidecmb(self): # Boolean
return self.get_query_params().get('Hidecmb')
def set_Hidecmb(self, Hidecmb): # Boolean
self.add_query_param('Hidecmb', Hidecmb)
def get_CachePreview(self): # Boolean
return self.get_query_params().get('CachePreview')
def set_CachePreview(self, CachePreview): # Boolean
self.add_query_param('CachePreview', CachePreview)
def get_Permission(self): # Struct
return self.get_query_params().get('Permission')
def set_Permission(self, Permission): # Struct
self.add_query_param("Permission", json.dumps(Permission))
def get_CredentialConfig(self): # Struct
return self.get_query_params().get('CredentialConfig')
def set_CredentialConfig(self, CredentialConfig): # Struct
self.add_query_param("CredentialConfig", json.dumps(CredentialConfig))
def get_User(self): # Struct
return self.get_query_params().get('User')
def set_User(self, User): # Struct
self.add_query_param("User", json.dumps(User)) | null |
1,197 | from __future__ import annotations
from collections.abc import Hashable, Mapping, Sequence
from typing import Any
import pytest
import dask
import dask.threaded
from dask.base import DaskMethodsMixin, dont_optimize, tokenize
from dask.context import globalmethod
from dask.delayed import Delayed, delayed
from dask.typing import (
DaskCollection,
Graph,
HLGDaskCollection,
Key,
NestedKeys,
PostComputeCallable,
PostPersistCallable,
)
try:
from IPython.display import DisplayObject
except ImportError:
DisplayObject = Any
da = pytest.importorskip("dask.array")
db = pytest.importorskip("dask.bag")
dds = pytest.importorskip("dask.datasets")
dd = pytest.importorskip("dask.dataframe")
def finalize(x: Sequence[Any]) -> Any:
return x[0]
def get1(dsk: Mapping, keys: Sequence[Key] | Key, **kwargs: Any) -> Any:
return dask.threaded.get(dsk, keys, **kwargs)
def get2(dsk: Mapping, keys: Sequence[Key] | Key, **kwargs: Any) -> Any:
return dask.get(dsk, keys, **kwargs)
class Inheriting(DaskCollection):
def __init__(self, based_on: DaskCollection) -> None:
self.based_on = based_on
def __dask_graph__(self) -> Graph:
return self.based_on.__dask_graph__()
def __dask_keys__(self) -> NestedKeys:
return self.based_on.__dask_keys__()
def __dask_postcompute__(self) -> tuple[PostComputeCallable, tuple]:
return finalize, ()
def __dask_postpersist__(self) -> tuple[PostPersistCallable, tuple]:
return self.based_on.__dask_postpersist__()
def __dask_tokenize__(self) -> Hashable:
return tokenize(self.based_on)
__dask_scheduler__ = staticmethod(dask.threaded.get)
__dask_optimize__ = globalmethod(
dont_optimize,
key="hlgcollection_optim",
falsey=dont_optimize,
)
def compute(self, **kwargs) -> Any:
return dask.compute(self, **kwargs)
def persist(self, **kwargs) -> Inheriting:
return Inheriting(self.based_on.persist(**kwargs))
def visualize(
self,
filename: str = "mydask",
format: str | None = None,
optimize_graph: bool = False,
**kwargs: Any,
) -> DisplayObject | None:
return dask.visualize(
self,
filename=filename,
format=format,
optimize_graph=optimize_graph,
**kwargs,
)
class HLGCollection(DaskMethodsMixin):
def __init__(self, based_on: HLGDaskCollection) -> None:
self.based_on = based_on
def __dask_graph__(self) -> Graph:
return self.based_on.__dask_graph__()
def __dask_layers__(self) -> Sequence[str]:
return self.based_on.__dask_layers__()
def __dask_keys__(self) -> NestedKeys:
return self.based_on.__dask_keys__()
def __dask_postcompute__(self) -> tuple[PostComputeCallable, tuple]:
return finalize, ()
def __dask_postpersist__(self) -> tuple[PostPersistCallable, tuple]:
return self.based_on.__dask_postpersist__()
def __dask_tokenize__(self) -> Hashable:
return tokenize(self.based_on)
__dask_scheduler__ = staticmethod(get1)
__dask_optimize__ = globalmethod(
dont_optimize,
key="hlgcollection_optim",
falsey=dont_optimize,
)
class NotHLGCollection(DaskMethodsMixin):
def __init__(self, based_on: DaskCollection) -> None:
self.based_on = based_on
def __dask_graph__(self) -> Graph:
return self.based_on.__dask_graph__()
def __dask_keys__(self) -> NestedKeys:
return self.based_on.__dask_keys__()
def __dask_postcompute__(self) -> tuple[PostComputeCallable, tuple]:
return finalize, ()
def __dask_postpersist__(self) -> tuple[PostPersistCallable, tuple]:
return self.based_on.__dask_postpersist__()
def __dask_tokenize__(self) -> Hashable:
return tokenize(self.based_on)
__dask_scheduler__ = staticmethod(get2)
__dask_optimize__ = globalmethod(
dont_optimize,
key="collection_optim",
falsey=dont_optimize,
)
def METHOD_NAME(x: int) -> int:
return x + 1
increment: Delayed = delayed(METHOD_NAME)
def assert_isinstance(coll: DaskCollection, protocol: Any) -> None:
assert isinstance(coll, protocol)
@pytest.mark.parametrize("protocol", [DaskCollection, HLGDaskCollection])
def test_isinstance_core(protocol):
arr = da.ones(10)
bag = db.from_sequence([1, 2, 3, 4, 5], npartitions=2)
df = dds.timeseries()
dobj = increment(2)
assert_isinstance(arr, protocol)
assert_isinstance(bag, protocol)
assert_isinstance(df, protocol)
assert_isinstance(dobj, protocol)
def test_isinstance_custom() -> None:
a = da.ones(10)
hlgc = HLGCollection(a)
nhlgc = NotHLGCollection(a)
assert isinstance(hlgc, DaskCollection)
    assert isinstance(nhlgc, DaskCollection)
assert not isinstance(nhlgc, HLGDaskCollection)
def compute(coll: DaskCollection) -> Any:
return coll.compute()
def compute2(coll: DaskCollection) -> Any:
return coll.compute()
def test_parameter_passing() -> None:
from dask.array import Array
a: Delayed = increment(2)
hlgc = HLGCollection(a)
assert compute(hlgc) == 3
assert compute2(hlgc) == 3
d: Delayed = increment(3)
assert compute(d) == 4
assert compute2(d) == 4
array: Array = da.ones(10)
assert compute(array).shape == (10,)
assert compute2(array).shape == (10,)
def test_inheriting_class() -> None:
inheriting: Inheriting = Inheriting(increment(2))
assert isinstance(inheriting, Inheriting) | null |
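# Sketch, not part of the original tests: compute() relies only on the
# collection protocol, so a wrapped delayed can be computed directly, e.g.
#   dask.compute(NotHLGCollection(increment(1)))  # expected: (2,)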
1,198 | # coding=utf-8
# Copyright 2018-2023 EvaDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mock import patch
from evadb.constants import NO_GPU
from evadb.executor.execution_context import Context
class ExecutionContextTest(unittest.TestCase):
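    # Note: stacked @patch decorators are applied bottom-up, so the patch
    # closest to the test method supplies the first mock argument.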
@patch("evadb.executor.execution_context.ConfigurationManager")
@patch("evadb.executor.execution_context.get_gpu_count")
@patch("evadb.executor.execution_context.is_gpu_available")
def test_CUDA_VISIBLE_DEVICES_gets_populated_from_config(
self, gpu_check, get_gpu_count, cfm
):
gpu_check.return_value = True
get_gpu_count.return_value = 3
cfm.return_value.get_value.return_value = [0, 1]
context = Context()
self.assertEqual(context.gpus, [0, 1])
@patch("evadb.executor.execution_context.ConfigurationManager")
@patch("evadb.executor.execution_context.os")
@patch("evadb.executor.execution_context.get_gpu_count")
@patch("evadb.executor.execution_context.is_gpu_available")
def test_CUDA_VISIBLE_DEVICES_gets_populated_from_environment_if_no_config(
self, is_gpu, get_gpu_count, os, cfm
):
is_gpu.return_value = True
cfm.return_value.get_value.return_value = []
get_gpu_count.return_value = 3
os.environ.get.return_value = "0,1"
context = Context()
os.environ.get.assert_called_with("CUDA_VISIBLE_DEVICES", "")
self.assertEqual(context.gpus, [0, 1])
@patch("evadb.executor.execution_context.ConfigurationManager")
@patch("evadb.executor.execution_context.os")
@patch("evadb.executor.execution_context.get_gpu_count")
@patch("evadb.executor.execution_context.is_gpu_available")
def test_CUDA_VISIBLE_DEVICES_should_be_empty_if_nothing_provided(
self, gpu_check, get_gpu_count, os, cfm
):
gpu_check.return_value = True
get_gpu_count.return_value = 3
cfm.return_value.get_value.return_value = []
os.environ.get.return_value = ""
context = Context()
os.environ.get.assert_called_with("CUDA_VISIBLE_DEVICES", "")
self.assertEqual(context.gpus, [])
@patch("evadb.executor.execution_context.ConfigurationManager")
@patch("evadb.executor.execution_context.os")
@patch("evadb.executor.execution_context.is_gpu_available")
def test_gpus_ignores_config_if_no_gpu_available(self, gpu_check, os, cfm):
gpu_check.return_value = False
cfm.return_value.get_value.return_value = [0, 1, 2]
os.environ.get.return_value = "0,1,2"
context = Context()
self.assertEqual(context.gpus, [])
@patch("evadb.executor.execution_context.ConfigurationManager")
@patch("evadb.executor.execution_context.os")
@patch("evadb.executor.execution_context.is_gpu_available")
def test_gpu_device_should_return_NO_GPU_if_GPU_not_available(
self, gpu_check, os, cfm
):
gpu_check.return_value = True
cfm.return_value.get_value.return_value = []
os.environ.get.return_value = ""
context = Context()
os.environ.get.assert_called_with("CUDA_VISIBLE_DEVICES", "")
self.assertEqual(context.gpu_device(), NO_GPU)
@patch("evadb.executor.execution_context.ConfigurationManager")
@patch("evadb.executor.execution_context.get_gpu_count")
@patch("evadb.executor.execution_context.is_gpu_available")
def METHOD_NAME(
self, gpu_check, get_gpu_count, cfm
):
gpu_check.return_value = True
get_gpu_count.return_value = 1
cfm.return_value.get_value.return_value = [0, 1, 2]
context = Context()
selected_device = context.gpu_device()
self.assertEqual(selected_device, 0) | null |
1,199 | import os
from galaxy_test.driver import integration_util
THIS_DIR = os.path.dirname(__file__)
PANEL_VIEWS_DIR_1 = os.path.join(THIS_DIR, "panel_views_1")
class TestPanelViewsFromDirectoryIntegration(integration_util.IntegrationTestCase):
framework_tool_and_types = True
allow_tool_conf_override = False
@classmethod
def handle_galaxy_config_kwds(cls, config):
super().handle_galaxy_config_kwds(config)
config["panel_views_dir"] = PANEL_VIEWS_DIR_1
def test_section_copy(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="filter"))
index_as_list = index.json()
sections = [x for x in index_as_list if x["model_class"] == "ToolSection"]
section_names = [s["name"] for s in sections]
assert len(section_names) == 1
assert "For Tours" in section_names
def test_custom_label_order(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="my-custom"))
verify_my_custom(index)
def test_filtering_sections_by_tool_id(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_2"))
index.raise_for_status()
index_as_list = index.json()
sections = [x for x in index_as_list if x["model_class"] == "ToolSection"]
assert len(sections) == 1
section = sections[0]
tools = section["elems"]
assert len(tools) == 3, len(tools)
def test_filtering_sections_by_tool_id_regex(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_3"))
verify_custom_regex_filtered(index)
def METHOD_NAME(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_4"))
index.raise_for_status()
index_as_list = index.json()
assert len(index_as_list) == 2
# Labels are filtered out...
assert model_classes(index_as_list) == ["Tool", "Tool"]
assert element_ids(index_as_list) == ["empty_list", "count_list"]
def test_custom_section_def(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_6"))
index.raise_for_status()
index_as_list = index.json()
assert len(index_as_list) == 1
assert model_classes(index_as_list) == ["ToolSection"]
section = index_as_list[0]
section_elems = section["elems"]
assert len(section_elems) == 4, model_classes(section_elems)
assert model_classes(section_elems) == ["ToolSectionLabel", "Tool", "ToolSectionLabel", "Tool"]
assert element_ids(section_elems) == ["the-start", "empty_list", "the-middle", "count_list"]
def test_section_embed(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_5"))
verify_custom_embed(index)
def test_section_embed_filtering(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_7"))
index.raise_for_status()
index_as_list = index.json()
assert len(index_as_list) == 1
assert model_classes(index_as_list) == ["ToolSection"]
section = index_as_list[0]
section_elems = section["elems"]
assert len(section_elems) == 5, model_classes(section_elems)
assert model_classes(section_elems) == ["Tool", "Tool", "Tool", "ToolSectionLabel", "Tool"]
elem_ids = element_ids(section_elems)
assert elem_ids[0:3] == ["multi_data_optional", "paths_as_file", "param_text_option"]
assert elem_ids[4] == "Filter1"
def test_section_reference_by_name(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_8"))
verify_custom_embed(index)
def test_section_alias(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_9"))
verify_custom_regex_filtered(index)
def test_expand_section_aliases(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_10"))
index.raise_for_status()
index_as_list = index.json()
assert len(index_as_list) == 2
assert model_classes(index_as_list) == ["ToolSection", "ToolSection"]
def test_global_filters(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_11"))
verify_custom_regex_filtered(index)
def test_global_filters_on_integrated_panel(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True, view="custom_12"))
index.raise_for_status()
index_as_list = index.json()
sections = [x for x in index_as_list if x["model_class"] == "ToolSection"]
assert len(sections) == 2
section = sections[0]
assert section["id"] == "test"
tools = section["elems"]
assert len(tools) == 2, len(tools)
class TestPanelViewsFromConfigIntegration(integration_util.IntegrationTestCase):
framework_tool_and_types = True
@classmethod
def handle_galaxy_config_kwds(cls, config):
super().handle_galaxy_config_kwds(config)
config["panel_views"] = [
{
"id": "my-custom",
"name": "My Custom",
"type": "generic",
"items": [
{
"type": "label",
"text": "The Start",
},
{
"type": "tool",
"id": "empty_list",
},
{
"type": "label",
"text": "The Middle",
},
{
"type": "tool",
"id": "count_list",
},
{
"type": "label",
"text": "The End",
},
],
}
]
config["default_panel_view"] = "my-custom"
def test_custom_label_order(self):
index = self.galaxy_interactor.get("tools", data=dict(in_panel=True))
verify_my_custom(index)
def verify_my_custom(index):
index.raise_for_status()
index_as_list = index.json()
sections = [x for x in index_as_list if x["model_class"] == "ToolSection"]
assert len(sections) == 0
assert len(index_as_list) == 5
assert model_classes(index_as_list) == ["ToolSectionLabel", "Tool", "ToolSectionLabel", "Tool", "ToolSectionLabel"]
def verify_custom_embed(index):
# custom_5 / custom_8
index.raise_for_status()
index_as_list = index.json()
assert len(index_as_list) == 1
assert model_classes(index_as_list) == ["ToolSection"]
section = index_as_list[0]
assert section["name"] == "My New Section"
assert section["id"] == "my-new-section"
section_elems = section["elems"]
assert len(section_elems) == 5, model_classes(section_elems)
assert model_classes(section_elems) == ["Tool", "Tool", "Tool", "Tool", "Tool"]
assert element_ids(section_elems) == [
"multi_data_optional",
"paths_as_file",
"param_text_option",
"column_param",
"Filter1",
]
def verify_custom_regex_filtered(index):
# custom_3 / custom_9
index.raise_for_status()
index_as_list = index.json()
sections = [x for x in index_as_list if x["model_class"] == "ToolSection"]
assert len(sections) == 1
section = sections[0]
tools = section["elems"]
assert len(tools) == 2, len(tools)
def element_ids(elements):
return [x["id"] for x in elements]
def model_classes(elements):
return [x["model_class"] for x in elements] | null |