text (string, lengths 67 to 7.88k characters) |
---|
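Each row below is a fill-in-the-middle (FIM) sample: a Python snippet whose function name has been masked out, laid out as `<|fim_prefix|>` + prefix text + `<|fim_suffix|>` + suffix text + `<|fim_middle|>` + the masked name, terminated by `<|file_separator|>` and the viewer's table-cell delimiter. As a minimal sketch of that layout (the marker strings are copied verbatim from the rows; which tokenizer they belong to is an assumption, and `split_fim_row` is a hypothetical helper, not part of the dataset), one row could be split back into its parts like this:

```python
import re

# Marker strings exactly as they appear in the rows below; treated here as plain
# text delimiters, not as entries in any particular tokenizer's vocabulary.
FIM_PREFIX = "<|fim_prefix|>"
FIM_SUFFIX = "<|fim_suffix|>"
FIM_MIDDLE = "<|fim_middle|>"
FILE_SEP = "<|file_separator|>"


def split_fim_row(row: str) -> dict:
    """Split one prefix/suffix/middle row into its three parts (hypothetical helper)."""
    row = row.rstrip(" |\n")  # drop the trailing table-cell delimiter from the export
    if row.endswith(FILE_SEP):
        row = row[: -len(FILE_SEP)]
    pattern = (
        re.escape(FIM_PREFIX) + r"(?P<prefix>.*?)"
        + re.escape(FIM_SUFFIX) + r"(?P<suffix>.*?)"
        + re.escape(FIM_MIDDLE) + r"(?P<middle>.*)"
    )
    match = re.match(pattern, row, flags=re.DOTALL)
    if match is None:
        raise ValueError("row does not follow the prefix/suffix/middle layout")
    return match.groupdict()


# Example using one of the short rows that appears further down in this table.
parts = split_fim_row(
    '<|fim_prefix|>def <|fim_suffix|>(self):\n'
    '    return "DELETE"<|fim_middle|>method<|file_separator|> |'
)
assert parts["prefix"] == "def "
assert parts["middle"] == "method"  # the masked-out function name
```

The example reuses the short `return "DELETE"` row shown below; the trailing `|` it strips is only the table-cell delimiter left over from the viewer export, not part of the sample itself.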
<|fim_prefix|>def <|fim_suffix|>(__a: object, __b: object) -> Any: ...<|fim_middle|>eq<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
target = 'classBackgroundLayer('
should_match = 'class BackgroundLayer(cocos.layer.Layer)'
filling = '\n'.join([
'this is a line',
'this is another'
])
text = should_match + '\n' + filling
lines = text.split('\n')
it = enumerate(lines)
start_line = get_start_line(it, target)
assert start_line == 0
assert six.next(it) == (1, lines[1])<|fim_middle|>test_start_1st_line_match<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument("reservoir_model_path", type=str, help="Input Zarr path")
parser.add_argument("output_path", type=str, help="Directory to save outputs to")
parser.add_argument(
"validation_config_path", type=str, help="Path to validation data config"
)
parser.add_argument(
"n_synchronize",
type=int,
help=(
"Number of timesteps from start of validation to use in reservoir "
"synchronization (not used in prediction)."
),
)
parser.add_argument(
"--n-validation-batches",
type=int,
default=None,
help="Number of batch data netcdfs to use for validation. Defaults to use all.",
)
return parser<|fim_middle|>get_parser<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(pidfile):
"""
internal helper.
:returns FilePath: a path to use for file-locking the given pidfile
"""
return pidfile.sibling("{}.lock".format(pidfile.basename()))<|fim_middle|>pidfile_to_lockpath<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# test bottomup training
dataset = self.build_atrw_dataset(data_mode='bottomup')
self.assertEqual(len(dataset), 2)
self.check_data_info_keys(dataset[0], data_mode='bottomup')
# test bottomup testing
dataset = self.build_atrw_dataset(data_mode='bottomup', test_mode=True)
self.assertEqual(len(dataset), 2)
self.check_data_info_keys(dataset[0], data_mode='bottomup')<|fim_middle|>test_bottomup<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
from traits.api import Interface
class IFoo(Interface):
pass
obj = {}
# Global `register_provides`.
register_provides(dict, IFoo)
self.assertEqual(obj, adapt(obj, IFoo))<|fim_middle|>test_global_register_provides<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return str(toml.dumps(self.to_dict())).strip().split('\n')<|fim_middle|>to_toml_string<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
dirname = os.path.join(support.TESTFN, 'Gr\xfc\xdf-\u66e8\u66e9\u66eb')
filename = '\xdf-\u66e8\u66e9\u66eb'
with support.temp_cwd(dirname):
with open(filename, 'wb') as f:
f.write((filename + '\n').encode("utf-8"))
os.access(filename,os.R_OK)
os.remove(filename)<|fim_middle|>test_directory<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
assert self.roidbs
if self.proposals and len(self.proposals.keys()) > 0:
logger.info("merge proposals to annos")
for id, record in enumerate(self.roidbs):
image_id = int(record["im_id"])
if image_id not in self.proposals.keys():
logger.info("image id :{} no proposals".format(image_id))
record["proposals"] = np.array(self.proposals.get(image_id, []), dtype=np.float32)
self.roidbs[id] = record<|fim_middle|>merge_anno_proposals<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.test_graph = create_graph()
from neural_compressor.adaptor.tf_utils.graph_rewriter.generic.fetch_weight_from_reshape import (
FetchWeightFromReshapeOptimizer,
)
convert_graph = FetchWeightFromReshapeOptimizer(self.test_graph).do_transformation()
handled = False
for node in convert_graph.node:
if node.op == "Conv2D" and node.input[1] == "reshape/weight_0":
handled = True
break
self.assertEqual(handled, True)<|fim_middle|>test_fetch_weight_from_reshape_optimizer<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return "DELETE"<|fim_middle|>method<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, value):
"""Helper function to transform the dictionary from strings or integers to bools"""
if value in [0, 'F', 'f']:
return False
if value in [1, 'T', 't']:
return True
return True<|fim_middle|>transform_to_bool<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(command):
assert not match(command)<|fim_middle|>test_not_match<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(inputs):
qml.AmplitudeEmbedding(inputs, wires=range(n_wires), pad_with=0, normalize=True)
return qml.expval(qml.PauliZ(0))<|fim_middle|>circuit_cuda<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(in_length, out_length, ctc_type):
if ctc_type == "builtin" or ctc_type == "cudnnctc":
_ctcloss_sum = torch.nn.CTCLoss(reduction="sum")
def torch_ctcloss(th_pred, th_target, th_ilen, th_olen):
th_pred = th_pred.log_softmax(2)
loss = _ctcloss_sum(th_pred, th_target, th_ilen, th_olen)
# Batch-size average
loss = loss / th_pred.size(1)
return loss
elif ctc_type == "gtnctc":
pytest.importorskip("gtn")
from espnet.nets.pytorch_backend.gtn_ctc import GTNCTCLossFunction
_ctcloss_sum = GTNCTCLossFunction.apply
def torch_ctcloss(th_pred, th_target, th_ilen, th_olen):
targets = [t.tolist() for t in th_target]
log_probs = torch.nn.functional.log_softmax(th_pred, dim=2)
loss = _ctcloss_sum(log_probs, targets, th_ilen, 0, "none")
return loss
n_out = 7
input_length = numpy.array(in_length, dtype=numpy.int32)
label_length = numpy.array(out_length, dtype=numpy.int32)
np_pred = [
numpy.random.rand(il, n_out).astype(numpy.float32) for il in input_length
]
np_target = [
numpy.random.randint(0, n_out, size=ol, dtype=numpy.int32)
for ol in label_length
]
# NOTE: np_pred[i] seems to be transposed and used axis=-1 in e2e_asr.py
ch_pred = F.separate(F.pad_sequence(np_pred), axis=-2)
ch_target = F.pad_sequence(np_target, padding=-1)
ch_loss = F.connectionist_temporal_classification(
ch_pred, ch_target, 0, input_length, label_length
).data
th_pred = pad_list([torch.from_numpy(x) for x in np_pred], 0.0).transpose(0, 1)
if ctc_type == "gtnctc":
# gtn implementation expects targets as list
th_target = np_target
# keep as B x T x H for gtn
th_pred = th_pred.transpose(0, 1)
else:
th_target = torch.from_numpy(numpy.concatenate(np_target))
th_ilen = torch.from_numpy(input_length)
th_olen = torch.from_numpy(label_length)
th_loss = torch_ctcloss(th_pred, th_target, th_ilen, th_olen).numpy()
numpy.testing.assert_allclose(th_loss, ch_loss, 0.05)<|fim_middle|>test_ctc_loss<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")<|fim_middle|>name<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, client):
json_input = {"hello":"world"}
client.body_three_types(json_input)
content = b"hello, world"
client.body_three_types(content)<|fim_middle|>test_body_three_types<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""Returns a tuple of (old_layers, new_layers, and optimizer)."""
model = SimpleModel(num_features=1, num_classes=10)
policy: Mapping[Type[torch.nn.Module], module_surgery.ReplacementFunction] = {
torch.nn.Linear: _CopyLinear.from_linear
}
opt = torch.optim.SGD(model.parameters(), lr=.001)
orig_linear_modules = [model.fc1, model.fc2]
module_surgery.replace_module_classes(model, policies=policy, optimizers=opt)
new_linear_modules = [model.fc1, model.fc2]
return orig_linear_modules, new_linear_modules, opt<|fim_middle|>optimizer_surgery_state<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(masks):
tile = DataTile(
np.ones((2 * 2, 10, 10)),
tile_slice=Slice(origin=(0, 0, 0), shape=Shape((2 * 2, 10, 10), sig_dims=2)),
scheme_idx=0,
)
slice_ = masks.get_masks_for_slice(tile.tile_slice)
assert slice_.shape == (100, 5)<|fim_middle|>test_for_datatile_2<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self
) -> None:
def mock_run_cmd(unused_cmd: str) -> str:
return 'scripts/setup.py\nscripts/setup_gae.py'
with self.swap(common, 'run_cmd', mock_run_cmd):
actual_scripts = (
repo_specific_changes_fetcher.get_setup_scripts_changes_status(
'release_tag'))
expected_scripts = {
'scripts/setup.py': True,
'scripts/setup_gae.py': True,
'scripts/install_third_party_libs.py': False,
'scripts/install_third_party.py': False
}
self.assertEqual(actual_scripts, expected_scripts)<|fim_middle|>test_get_setup_scripts_changes_status_to<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, obj):
addObjectProperty(obj, 'FieldName', "S", "App::PropertyString", "Scalar transport",
"Name of the scalar transport field")
addObjectProperty(obj, 'DiffusivityFixed', False, "App::PropertyBool", "Scalar transport",
"Use fixed value for diffusivity rather than viscosity")
# This is actually rho*diffusivity, but this is what OpenFOAM uses
addObjectProperty(obj, 'DiffusivityFixedValue', "0.001 kg/m/s", "App::PropertyQuantity", "Scalar transport",
"Diffusion coefficient for fixed diffusivity")
addObjectProperty(obj, 'RestrictToPhase', False, "App::PropertyBool", "Scalar transport",
"Restrict transport within phase")
addObjectProperty(obj, 'PhaseName', "water", "App::PropertyString", "Scalar transport",
"Transport within phase")
addObjectProperty(obj, 'InjectionRate', '1 kg/s', "App::PropertyQuantity", "Scalar transport",
"Injection rate")
addObjectProperty(obj, 'InjectionPoint', FreeCAD.Vector(0, 0, 0), "App::PropertyPosition", "Scalar transport",
"Location of the injection point")<|fim_middle|>init_properties<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(f, v):
var = tkinter.IntVar(f)
var.set(v)
w = tkinter.Entry(f, textvariable=var, validatecommand=validate_posint, validate="all", width=10)
return w, var<|fim_middle|>posintentry<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> Likelihood:
return Gaussian(variance=Setup.likelihood_variance)<|fim_middle|>likelihood<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request, category):
form = DonationGiftForm(data=request.POST, category=category, request=request)
if form.is_valid():
messages.add_message(request, messages.SUCCESS, "Danke für Deine Bestellung!")
form.save(request)
return get_redirect(request)
messages.add_message(request, messages.ERROR, "Form-Fehler!")
return get_redirect(request, next=request.META.get("HTTP_REFERER", "/"))<|fim_middle|>make_order<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Test unicode(): default arguments with unicode-subclass input.
"""
class UnicodeSubclass(unicode):
pass
s = UnicodeSubclass(u"foo")
loader = Loader()
actual = loader.unicode(s)
self.assertString(actual, u"foo")<|fim_middle|>test_unicode_basic_input_unicode_subclass<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(users):
return "<br><br>" + "<br>".join(users)<|fim_middle|>format_message_for_assign_to<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return self.fHprSnap<|fim_middle|>get_hpr_snap<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Verify the method retrieves data from the User API and caches it."""
username = Faker().user_name()
user_data = {
"username": username,
}
user_url = f"{self.site_configuration.user_api_url}accounts/{username}"
responses.add(responses.GET, user_url, body=json.dumps(user_data), content_type=JSON, status=200)
verification_data = {
"username": "jdoe",
"verified_name": "Jonathan Doe",
"profile_name": "Jon Doe",
"verification_attempt_id": 123,
"proctored_exam_attempt_id": None,
"is_verified": True,
"use_verified_name_for_certs": False,
}
verification_url = f"{self.site_configuration.name_verification_api_url}?username={username}"
responses.add(
responses.GET, verification_url, body=json.dumps(verification_data), content_type=JSON, status=200
)
self.mock_access_token_response()
expected_data = user_data
expected_data["verified_name"] = "Jonathan Doe"
expected_data["use_verified_name_for_certs"] = False
actual = self.site_configuration.get_user_api_data(username)
self.assertEqual(actual, expected_data)
self.assertEqual(len(responses.calls), 3)
# Verify the data is cached
responses.reset()
actual = self.site_configuration.get_user_api_data(username)
self.assertEqual(actual, expected_data)
self.assertEqual(len(responses.calls), 0)<|fim_middle|>test_get_user_api_data_with_cache<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(x):
A, B = get_model_matrix()
start = time.time()
K, _, _ = dlqr(A, B, Q, R)
u = -K @ x
elapsed_time = time.time() - start
print(f"calc time:{elapsed_time:.6f} [sec]")
return u<|fim_middle|>lqr_control<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
email = "[email protected]"
cardholder_name = "Tom %s" % random.randint(1, 10000)
customer_id = "%s" % random.randint(1, 10000)
expiration_date = "10/2012"
number = CreditCardNumbers.MasterCard
postal_code = "44444"
customer = Customer.create({
"id": customer_id,
"email": email,
"credit_card": {
"cardholder_name": cardholder_name,
"expiration_date": expiration_date,
"number": number,
"billing_address": {
"postal_code": postal_code
},
"options": {
"verify_card": True
}
}
}).customer
found_verifications = CreditCardVerification.search(
CreditCardVerificationSearch.credit_card_expiration_date == expiration_date,
CreditCardVerificationSearch.credit_card_cardholder_name == cardholder_name,
CreditCardVerificationSearch.credit_card_number == number,
CreditCardVerificationSearch.customer_email == email,
CreditCardVerificationSearch.customer_id == customer_id,
CreditCardVerificationSearch.billing_postal_code == postal_code
)
self.assertEqual(1, found_verifications.maximum_size)
self.assertEqual(customer.credit_cards[0].token, found_verifications.first.credit_card["token"])<|fim_middle|>test_all_text_fields<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(osd_label=constants.OSD_APP_LABEL, namespace=None):
"""
Fetches info about osd deployments in the cluster
Args:
osd_label (str): label associated with osd deployments
(default: defaults.OSD_APP_LABEL)
namespace (str): Namespace in which ceph cluster lives
(default: config.ENV_DATA["cluster_namespace"])
Returns:
list: OSD deployment OCS instances
"""
namespace = namespace or config.ENV_DATA["cluster_namespace"]
osds = get_deployments_having_label(osd_label, namespace)
return osds<|fim_middle|>get_osd_deployments<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
r"""
:math:`\tau_+`
:rtype: float
"""
return self.__tau_plus<|fim_middle|>tau_plus<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Retrieves the presence of the device
Returns:
bool: True if device is present, False if not
"""
attr_path = HWMON_DIR + self.fan_presence_attr
attr_rv = self.__get_attr_value(attr_path)
if (attr_rv != 'ERR'):
if (attr_rv == '1'):
return True
else:
return False
return None<|fim_middle|>get_presence<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(sparse_nn_data):
with pytest.raises(ValueError):
nearest_neighbors(
sparse_nn_data,
10,
"seuclidean",
{},
False,
np.random,
)<|fim_middle|>test_nn_bad_metric_sparse_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
pytest.importorskip("matplotlib")
plt.figure()
with quantity_support():
x = [1, 2, 3] * u.s
y = [1, 2, 3] * u.m
yerr = [3, 2, 1] * u.cm
fig, ax = plt.subplots()
ax.errorbar(x, y, yerr=yerr)
assert ax.xaxis.get_units() == u.s
assert ax.yaxis.get_units() == u.m<|fim_middle|>test_units_errbarr<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, collection_id, lib_albums):
lib_ids = {x.mb_albumid for x in lib_albums}
albums_in_collection = self._get_albums_in_collection(collection_id)
remove_me = list(set(albums_in_collection) - lib_ids)
for i in range(0, len(remove_me), FETCH_CHUNK_SIZE):
chunk = remove_me[i:i + FETCH_CHUNK_SIZE]
mb_call(
musicbrainzngs.remove_releases_from_collection,
collection_id, chunk
)<|fim_middle|>remove_missing<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(log_records, search_string):
    for log_record in log_records[::-1]:
if isinstance(log_record.msg, str) and search_string in log_record.msg:
return json.loads(log_record.msg)<|fim_middle|>get_last_log_stats_containing_string<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Return irreducible q-points and weights."""
return self._qpoints, self._weights<|fim_middle|>get_qpoints<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, path):
if not HAS_RARFILE:
raise CuckooPackageError("rarfile Python module not installed in guest")
# Check file extension.
path = check_file_extension(path, ".rar")
root = os.environ["TEMP"]
password = self.options.get("password")
exe_regex = re.compile(r"(\.exe|\.scr|\.msi|\.bat|\.lnk)$", flags=re.IGNORECASE)
rarinfos = self.get_infos(path)
self.extract_rar(path, root, password)
file_name = self.options.get("file")
# If no file name is provided via option, take the first file.
if file_name is None:
# No name provided try to find a better name.
if len(rarinfos):
# Attempt to find a valid exe extension in the archive
for f in rarinfos:
if exe_regex.search(f.filename):
file_name = f.filename
break
# Default to the first one if none found
file_name = file_name or rarinfos[0].filename
log.debug("Missing file option, auto executing: %s", file_name)
else:
raise CuckooPackageError("Empty RAR archive")
file_path = os.path.join(root, file_name)
if file_name.lower().endswith(".lnk"):
cmd_path = self.get_path("cmd.exe")
cmd_args = f'/c start /wait "" "{file_path}"'
return self.execute(cmd_path, cmd_args, file_path)
return self.execute(file_path, self.options.get("arguments"), file_path)<|fim_middle|>start<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> None:
rule_runner = RuleRunner(
rules=[QueryRule(CompleteEnvironmentVars, [EnvironmentName])],
target_types=[DockerEnvironmentTarget],
inherent_environment=EnvironmentName("docker"),
)
localhost_platform = Platform.create_for_localhost()
if localhost_platform == Platform.linux_arm64:
image_sha = "65a4aad1156d8a0679537cb78519a17eb7142e05a968b26a5361153006224fdc"
platform = Platform.linux_arm64.value
else:
image_sha = "a1801b843b1bfaf77c501e7a6d3f709401a1e0c83863037fa3aab063a7fdb9dc"
platform = Platform.linux_x86_64.value
rule_runner.write_files(
{
"BUILD": dedent(
f"""\
docker_environment(
name='docker',
image='centos@sha256:{image_sha}',
platform='{platform}',
)
"""
)
}
)
rule_runner.set_options(["--environments-preview-names={'docker': '//:docker'}"])
result = dict(rule_runner.request(CompleteEnvironmentVars, []))
# HOSTNAME is not deterministic across machines, so we don't care about the value.
assert "HOSTNAME" in result
result.pop("HOSTNAME")
assert dict(result) == {
"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"HOME": "/root",
}<|fim_middle|>test_docker_complete_env_vars<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
        if self._target_os == "Macos":
return False
return True<|fim_middle|>has_as<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional[str]:
"""
Resource ID.
"""
return pulumi.get(self, "id")<|fim_middle|>id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
parameters = {
**self.serialize_query_param(
"api-version", "2018-11-01",
required=True,
),
}
return parameters<|fim_middle|>query_parameters<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
setup_class_common("server_config")<|fim_middle|>setup_class<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(datapath):
db = "pipe.neu"
TestSection(db)
OpenDatabase(pjoin(datapath,db))
AddMeshPlot()
DrawPlots()
v = View3DAttributes()
v.viewNormal = (-0.786306, 0.379526, 0.487527)
v.focus = (0.149902, 0.212562, 0.124929)
v.viewUp = (0.316186, 0.925114, -0.210215)
v.viewAngle = 30
v.parallelScale = 0.585963
v.nearPlane = -1.17193
v.farPlane = 1.17193
v.imagePan = (0.0173275, 0.033058)
v.imageZoom = 1.45734
v.perspective = 1
v.eyeAngle = 2
v.centerOfRotationSet = 0
v.centerOfRotation = (0.149902, 0.212562, 0.124929)
SetView3D(v)
Test("PATRAN1_0")
v2 = View3DAttributes()
v2.viewNormal = (-0.786306, 0.379526, 0.487527)
v2.focus = (0.149902, 0.212562, 0.124929)
v2.viewUp = (0.316186, 0.925114, -0.210215)
v2.viewAngle = 30
v2.parallelScale = 0.585963
v2.nearPlane = -1.17193
v2.farPlane = 1.17193
v2.imagePan = (-0.108127, 0.0971661)
v2.imageZoom = 6.92887
v2.perspective = 1
v2.eyeAngle = 2
v2.centerOfRotationSet = 0
v2.centerOfRotation = (0.149902, 0.212562, 0.124929)
SetView3D(v2)
Test("PATRAN1_1")
DeleteActivePlots()
AddPseudocolorPlot()
SetView3D(v)
Test("PATRAN1_2")
DeleteActivePlots()
CloseDatabase(pjoin(datapath,db))<|fim_middle|>test1<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(config, str2id, term_file, terms, item_distribution):
train_data = []
neg_samples = []
with io.open(config.train_data, encoding=config.encoding) as f:
for idx, line in enumerate(f):
if idx % 100000 == 0:
log.info("%s readed %s lines" % (config.train_data, idx))
slots = []
for col_idx, col in enumerate(line.strip("\n").split("\t")):
s = col[: config.max_seqlen]
if s not in str2id:
str2id[s] = len(str2id)
term_file.write(str(col_idx) + "\t" + col + "\n")
item_distribution.append(0)
slots.append(str2id[s])
src = slots[0]
dst = slots[1]
neg_samples.append(slots[2:])
train_data.append((src, dst))
train_data = np.array(train_data, dtype="int64")
np.save(os.path.join(config.graph_work_path, "train_data.npy"), train_data)
if len(neg_samples) != 0:
np.save(os.path.join(config.graph_work_path, "neg_samples.npy"), np.array(neg_samples))<|fim_middle|>load_link_prediction_train_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
The ID of the private endpoint connection.
"""
return pulumi.get(self, "id")<|fim_middle|>id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self._test_mapping(SearchRoute, Route)
queryable_fields = SearchRoute.queryable_fields
self.assertIn('rmina', queryable_fields)
self.assertTrue(queryable_fields['rmina']._range)
self.assertIn('act', queryable_fields)
self.assertIsNotNone(queryable_fields['act']._enum)
self.assertIn('dhei', queryable_fields)
self.assertIn('ralt', queryable_fields)<|fim_middle|>test_route_mapping<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(fname, exp_shape):
"""test_decode_dicom_image"""
dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "test_dicom", fname
)
file_contents = tf.io.read_file(filename=dcm_path)
dcm_image = tfio.image.decode_dicom_image(
contents=file_contents,
dtype=tf.float32,
on_error="strict",
scale="auto",
color_dim=True,
)
assert dcm_image.numpy().shape == exp_shape<|fim_middle|>test_decode_dicom_image<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
current_func = functions[self.function_name].func
params = [si.sv_get(default=[[]], deepcopy=False) for si in self.inputs]
matching_f = list_match_func[self.list_match]
desired_levels = [2 for p in params]
ops = [current_func, self.list_match, self.output_numpy]
result = recurse_f_level_control(params, ops, logic_numpy, matching_f, desired_levels)
self.outputs[0].sv_set(result)<|fim_middle|>process<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_rule_set_formula2<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
# language=rst
"""
Reset the pipeline.
"""
self.network.METHOD_NAME()
self.step_count = 0<|fim_middle|>reset_state_variables<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
with urlopen(self.url) as fd:
content = fd.read().decode()
return content<|fim_middle|>fetch_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
zone = self.driver.list_zones()[0]
name = "test"
type = RecordType.A
data = "200.150.100.50"
record = self.driver.create_record(name, zone, type, data)
self.assertEqual(record.id, "123")
self.assertEqual(record.name, name)
self.assertEqual(record.type, "A")
self.assertEqual(record.data, data)<|fim_middle|>test_create_record<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, arg):
""" Run tests:
* full - run all tests
* unit - run tests (also for each backend)
* any backend name (e.g. pyside2, pyside, pyqt4, etc.) -
run tests for the given backend
* nobackend - run tests that do not require a backend
* extra - run extra tests (line endings and style)
* lineendings - test line ending consistency
* flake - flake style testing (PEP8 and more)
* docs - test docstring parameters for correctness
* examples - run all examples
* examples [examples paths] - run given examples
"""
# Note: By default, "python make full" *will* produce coverage data,
# whereas vispy.test('full') will not. This is because users won't
# really care about coveraged, but developers will.
if not arg:
return self.help('test')
from vispy import METHOD_NAME
try:
args = arg.split(' ')
METHOD_NAME(args[0], ' '.join(args[1:]), coverage=True)
except Exception as err:
print(err)
if not isinstance(err, RuntimeError):
type_, value, tb = sys.exc_info()
                traceback.print_exception(type_, value, tb)
raise SystemExit(1)<|fim_middle|>test<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
query_context: QueryContext, query_obj: QueryObject, force_cached: bool = False
) -> dict[str, Any]:
datasource = _get_datasource(query_context, query_obj)
query_obj = copy.copy(query_obj)
query_obj.is_timeseries = False
query_obj.orderby = []
query_obj.metrics = None
query_obj.post_processing = []
qry_obj_cols = []
for o in datasource.columns:
if isinstance(o, dict):
qry_obj_cols.append(o.get("column_name"))
else:
qry_obj_cols.append(o.column_name)
query_obj.columns = qry_obj_cols
query_obj.from_dttm = None
query_obj.to_dttm = None
return _get_full(query_context, query_obj, force_cached)<|fim_middle|>get_samples<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
event_yielder = self.yield_events_dag_succeeds()
attribs = {
"start_execution.return_value": ("remote_execution_started", "12345", "abcdefg123"),
"rcv_next.side_effect": event_yielder,
}
self._run_engine(attribs)<|fim_middle|>test_remote_engine_manager<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(batch, separator_token_id):
"""Segment embeddings as described in [1]
The values {0,1} were found in the repository [2].
Attributes:
batch: torch.Tensor, size [batch_size, block_size]
Batch of input.
separator_token_id: int
The value of the token that separates the segments.
[1] Liu, Yang, and Mirella Lapata. "Text summarization with pretrained encoders."
arXiv preprint arXiv:1908.08345 (2019).
[2] https://github.com/nlpyang/PreSumm (/src/prepro/data_builder.py, commit fac1217)
"""
batch_embeddings = []
for sequence in batch:
sentence_num = -1
embeddings = []
for s in sequence:
if s == separator_token_id:
sentence_num += 1
embeddings.append(sentence_num % 2)
batch_embeddings.append(embeddings)
return torch.tensor(batch_embeddings)<|fim_middle|>compute_token_type_ids<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, args: CommonConfig) -> str:
"""Return the base commit or an empty string."""<|fim_middle|>get_base_commit<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> None:
import trio
# no warning on accessing the submodule
assert trio.tests
# only when accessing a submodule member
with pytest.warns(TrioDeprecationWarning):
assert trio.tests.test_abc # type: ignore[attr-defined]<|fim_middle|>test_tests_is_deprecated3<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, *args, **kwargs):
pass<|fim_middle|>train<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
img: np.ndarray,
model: OnnxModel,
tile_size: TileSize,
separate_alpha: bool,
) -> np.ndarray:
"""Upscales an image with a pretrained model"""
settings = get_settings()
session = get_onnx_session(
model,
settings.gpu_index,
settings.execution_provider,
settings.tensorrt_fp16_mode,
settings.tensorrt_cache_path,
)
input_shape, in_nc, req_width, req_height = get_input_shape(session)
_, out_nc, _, _ = get_output_shape(session)
change_shape = input_shape == "BHWC"
exact_size = None
if req_width is not None:
exact_size = req_width, req_height or req_width
elif req_height is not None:
exact_size = req_width or req_height, req_height
h, w, c = get_h_w_c(img)
logger.debug(f"Image is {h}x{w}x{c}")
return convenient_upscale(
img,
in_nc,
out_nc,
lambda i: upscale(i, session, tile_size, change_shape, exact_size),
separate_alpha,
)<|fim_middle|>upscale_image_node<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, manager):
"""Adopt *manager* into pyplot and make it the active manager."""
if not hasattr(manager, "_cidgcf"):
manager._cidgcf = manager.canvas.mpl_connect(
"button_press_event", lambda event: cls.set_active(manager))
fig = manager.canvas.figure
fig.number = manager.num
label = fig.get_label()
if label:
manager.set_window_title(label)
cls.set_active(manager)<|fim_middle|>set_new_active_manager<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""mapserver exceptions behave with multiple threads"""
workers = []
for i in range(10):
name = 'e%d' % (i)
thread = threading.Thread(target=trigger_exception, name=name, args=(name,))
workers.append(thread)
thread.start()<|fim_middle|>test_exceptions_multi_threads<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
# Testing the initialization when seed=None
# Since internal states randomly generated,
# we just check the output classes
z = function(inputs=[], outputs=[RandomState()])()
aes_res = z[0]
assert isinstance(aes_res, np.random.RandomState)
z = function(inputs=[], outputs=[default_rng()])()
aes_res = z[0]
assert isinstance(aes_res, np.random.Generator)<|fim_middle|>test_random_maker_ops_no_seed<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(subparsers):
autoscale_parser = subparsers.add_parser(
"autoscale",
help="Manually scale a service up and down manually, bypassing the normal autoscaler",
)
autoscale_parser.add_argument(
"-s", "--service", help="Service that you want to stop. Like 'example_service'."
).completer = lazy_choices_completer(list_services)
autoscale_parser.add_argument(
"-i",
"--instance",
help="Instance of the service that you want to stop. Like 'main' or 'canary'.",
required=True,
).completer = lazy_choices_completer(list_instances)
autoscale_parser.add_argument(
"-c",
"--cluster",
help="The PaaSTA cluster that has the service instance you want to stop. Like 'pnw-prod'.",
required=True,
).completer = lazy_choices_completer(list_clusters)
autoscale_parser.add_argument(
"--set", help="Set the number to scale to. Must be an Int.", type=int
)
autoscale_parser.set_defaults(command=paasta_autoscale)<|fim_middle|>add_subparser<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, instance):
attrs = super().METHOD_NAME(instance)
if self.use_row_ordering_attributes:
attrs["id"] = "page_%d" % instance.id
attrs["data-page-title"] = instance.get_admin_display_title()
return attrs<|fim_middle|>get_row_attrs<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
model = get_model(
io.StringIO(
"\n".join(
[
par_base,
"DMX 15",
"DMX_0001 16 1",
"DMXR1_0001 58000",
"DMXR2_0001 59000",
]
)
)
)
toas = make_fake_toas_uniform(57000, 57900, 10, model)
with pytest.raises(MissingTOAs) as e:
model.validate_toas(toas)
assert e.value.parameter_names == ["DMX_0001"]
fitter = pint.fitter.WLSFitter(toas, model)
with pytest.raises(MissingTOAs):
fitter.fit_toas()<|fim_middle|>test_dmx_no_toas<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(s: str) -> bool:
if s.startswith(":"):
return True
if ":" in s and "//" in s:
return True
return False<|fim_middle|>looks_like_label<|file_separator|> |
<|fim_prefix|> <|fim_suffix|>(self):<|fim_middle|>get_text<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, p):
'''template_func : tf_format_json
| tf_echo
| tf_generic'''<|fim_middle|>p_template_func<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(pipeline_response):
deserialized = self._deserialize("OperationPage", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)<|fim_middle|>extract_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Render navbar from home page with restricted UI set to True.
This restricts the user to be able to view ONLY relationships on the navbar.
It then checks the UI for these restrictions.
"""
self.add_permissions("extras.view_relationship")
user_permissions = self.user.get_all_permissions()
self.browser.visit(self.live_server_url)
for tab_name, groups in self.navbar.items():
tab_flag = False
for _, items in groups.items():
for _, item_details in items.items():
if item_details["permission"] in user_permissions:
tab_flag = True
# XPath to find tabs using the tab name
tabs = self.browser.find_by_xpath(f"//*[@id='navbar']//*[contains(text(), '{tab_name}')]")
if tab_flag:
self.assertEqual(len(tabs), 1)
else:
self.assertEqual(len(tabs), 0)<|fim_middle|>test_navbar_render_restricted_ui<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(tmp_path: Path) -> None:
src_target = tmp_path / "src.py"
python_codegen(metaschema_file_uri, src_target)
assert os.path.exists(src_target)
with open(src_target) as f:
assert f.read() == inspect.getsource(cg_metaschema)<|fim_middle|>test_meta_schema_gen_up_to_date<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
self.reset()
for driver in list(self._drivers):
self.terminate(driver)<|fim_middle|>cleanup<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
"""Lists all available Relay REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Operation or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.relay.models.Operation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2021-11-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)<|fim_middle|>list<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
assert resource_string("missing") is None
assert resource_string("missing", "gone") is None
assert resource_string('test/data/empty.txt') == six.b('empty')
assert resource_string('empty.txt', 'test/data') == six.b('empty')
tmp = tempfile.NamedTemporaryFile('w').name
with open(tmp, "w") as fd:
fd.write("test")
try:
print(resource_string(tmp))
assert resource_string(tmp) == 'test'
(d, fn) = os.path.split(tmp)
assert resource_string(fn, d) == 'test'
except IOError as ex:
raise ex
finally:
try:
os.unlink(tmp)
except Exception:
pass<|fim_middle|>test_resource_string<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(components: Iterable[Union[E3dcBat, E3dcCounter, E3dcInverter,
E3dcExternalInverter]]) -> None:
with client as c:
for component in components:
with SingleComponentUpdateContext(component.component_info):
component.update(c)<|fim_middle|>update_components<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(response, api_timeout=None, headers=None, timeout=None):
"""
:param response: request
:param api_timeout: asynchronous API timeout (will wait forever or until error if None)
:param headers: request headers
:param timeout: connect timeout
:return: request
"""
logger = logging.getLogger(__name__)
location_uri = response.headers.get("Location")
if location_uri is None:
raise Exception(f"no Location header in {response}")
start_time = time.time()
if api_timeout is None:
while True:
done, response = call_finished(location_uri, headers, timeout)
if done:
break
time.sleep(1)
else:
for _ in range(api_timeout):
done, response = call_finished(location_uri, headers, timeout)
if done:
break
time.sleep(1)
if response.status_code == 202:
wait_time = time.time() - start_time
logger.warn(f"API request still not completed after {int(wait_time)} seconds: {response}")
return response
logger.debug(f"DELETE API call to {location_uri}")
requests.delete(location_uri, headers=headers, proxies=get_proxies(location_uri), timeout=timeout)
return response<|fim_middle|>wait_for_async_api<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(write_client):
await write_client.indices.create(
index="test-mapping",
body={
"settings": {"number_of_shards": 1, "number_of_replicas": 0},
"mappings": {
"date_detection": False,
"properties": {
"title": {
"type": "text",
"analyzer": "snowball",
"fields": {"raw": {"type": "keyword"}},
},
"created_at": {"type": "date"},
"comments": {
"type": "nested",
"properties": {
"created": {"type": "date"},
"author": {
"type": "text",
"analyzer": "snowball",
"fields": {"raw": {"type": "keyword"}},
},
},
},
},
},
},
)
m = await mapping.AsyncMapping.from_opensearch("test-mapping", using=write_client)
assert ["comments", "created_at", "title"] == list(
sorted(m.properties.properties._d_.keys())
)
assert {
"date_detection": False,
"properties": {
"comments": {
"type": "nested",
"properties": {
"created": {"type": "date"},
"author": {
"analyzer": "snowball",
"fields": {"raw": {"type": "keyword"}},
"type": "text",
},
},
},
"created_at": {"type": "date"},
"title": {
"analyzer": "snowball",
"fields": {"raw": {"type": "keyword"}},
"type": "text",
},
},
} == m.to_dict()
# test same with alias
await write_client.indices.put_alias(index="test-mapping", name="test-alias")
m2 = await mapping.AsyncMapping.from_opensearch("test-alias", using=write_client)
assert m2.to_dict() == m.to_dict()<|fim_middle|>test_mapping_gets_updated_from_opensearch<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_creation<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return "POST"<|fim_middle|>method<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(api_client):
response = api_client.get(
'/submissions/authors',
content_type='application/json',
)
assert response.status_code == 401<|fim_middle|>test_new_author_submit_without_authentication_get<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/AuthorizationRules/{authorizationRuleName}",
**self.url_parameters
)<|fim_middle|>url<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
params: SpinEchoParameters,
platform: Platform,
qubits: Qubits,
) -> SpinEchoData:
"""Data acquisition for SpinEcho"""
# create a sequence of pulses for the experiment:
# Spin Echo 3 Pulses: RX(pi/2) - wait t(rotates z) - RX(pi) - wait t(rotates z) - RX(pi/2) - readout
ro_pulses = {}
RX90_pulses1 = {}
RX_pulses = {}
RX90_pulses2 = {}
sequence = PulseSequence()
for qubit in qubits:
RX90_pulses1[qubit] = platform.create_RX90_pulse(qubit, start=0)
RX_pulses[qubit] = platform.create_RX_pulse(
qubit, start=RX90_pulses1[qubit].finish
)
RX90_pulses2[qubit] = platform.create_RX90_pulse(
qubit, start=RX_pulses[qubit].finish
)
ro_pulses[qubit] = platform.create_qubit_readout_pulse(
qubit, start=RX90_pulses2[qubit].finish
)
sequence.add(RX90_pulses1[qubit])
sequence.add(RX_pulses[qubit])
sequence.add(RX90_pulses2[qubit])
sequence.add(ro_pulses[qubit])
# define the parameter to sweep and its range:
# delay between pulses
ro_wait_range = np.arange(
params.delay_between_pulses_start,
params.delay_between_pulses_end,
params.delay_between_pulses_step,
)
data = SpinEchoData()
# sweep the parameter
for wait in ro_wait_range:
# save data as often as defined by points
for qubit in qubits:
RX_pulses[qubit].start = RX90_pulses1[qubit].finish + wait
RX90_pulses2[qubit].start = RX_pulses[qubit].finish + wait
ro_pulses[qubit].start = RX90_pulses2[qubit].finish
# execute the pulse sequence
results = platform.execute_pulse_sequence(
sequence,
ExecutionParameters(
nshots=params.nshots,
relaxation_time=params.relaxation_time,
acquisition_type=AcquisitionType.INTEGRATION,
averaging_mode=AveragingMode.CYCLIC,
),
)
for qubit in qubits:
result = results[ro_pulses[qubit].serial]
data.register_qubit(
qubit, wait=wait, msr=result.magnitude, phase=result.phase
)
return data<|fim_middle|>acquisition<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, equal_resolution=False, numpify=False, torchify=False):
return METHOD_NAME(
batch_size=self.batch_size,
num_channels=self.num_channels,
min_resolution=self.min_resolution,
max_resolution=self.max_resolution,
equal_resolution=equal_resolution,
numpify=numpify,
torchify=torchify,
)<|fim_middle|>prepare_image_inputs<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, terminator: str):
old_terminator = self.handler.terminator
self.handler.terminator = terminator
yield
self.handler.terminator = old_terminator<|fim_middle|>use_terminator<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self, request, context):<|fim_middle|>find_monitored_entity<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(include_unk, expected):
y = '''<|fim_middle|>test_include_unknown<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(task_obj):
return os.path.join(settings.DATA_ROOT, str(task_obj.id))<|fim_middle|>get_task_dirname<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
indices = self.get_codebook_indices()
return (
self.vars.squeeze(0)
.index_select(0, indices)
.view(self.num_vars ** self.groups, -1)
)<|fim_middle|>codebook<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(some_function) -> callable:
""" Decorator for functions (pages) that require a user to provide identification. Returns
403 (forbidden) or 401 (depending on beiwe-api-version) if the identifying info (username,
password, device ID) are invalid.
In any function wrapped with this decorator provide a parameter named "patient_id" (with the
user's id) and a parameter named "password" with an SHA256 hashed instance of the user's
password. """
@functools.wraps(some_function)
def authenticate_and_call(*args, **kwargs):
request: ParticipantRequest = args[0]
assert isinstance(request, HttpRequest), \
f"first parameter of {some_function.__name__} must be an HttpRequest, was {type(request)}."
correct_for_basic_auth(request)
if validate_post(request, require_password=True, registration=True):
return some_function(*args, **kwargs)
is_ios = kwargs.get("OS_API", None) == IOS_API
return abort(401 if is_ios else 403)
return authenticate_and_call<|fim_middle|>authenticate_participant_registration<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> None:
theta_3d = create_theta_3d()
N, C, D, W, H = len(theta_3d), 3, 4, 5, 6
data_size = (D, W, H)
for align_corners in (0, 1):
node = onnx.helper.make_node(
"AffineGrid",
inputs=["theta", "size"],
outputs=["grid"],
align_corners=align_corners,
)
original_grid = construct_original_grid(data_size, align_corners)
grid = apply_affine_transform(theta_3d, original_grid)
test_name = "test_affine_grid_3d"
if align_corners == 1:
test_name += "_align_corners"
expect(
node,
inputs=[theta_3d, np.array([N, C, D, W, H], dtype=np.int64)],
outputs=[grid],
name=test_name,
)<|fim_middle|>export_3d_no_reference_evaluator<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, cmd, *args, **kwargs):
return self.node.remoter.METHOD_NAME(f'{self.sudo_needed} docker exec {self.docker_id} /bin/sh -c {shlex.quote(cmd)}', *args, **kwargs)<|fim_middle|>run<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
router = Router(dispatcher=handler_dispatcher())
@resource("/_localstack/health")
class TestResource:
def on_post(self, request):
return "POST/OK"
router.add(TestResource())
with pytest.raises(MethodNotAllowed):
assert router.dispatch(Request("GET", "/_localstack/health"))
assert router.dispatch(Request("POST", "/_localstack/health")).get_data(True) == "POST/OK"<|fim_middle|>test_dispatch_to_non_existing_method_raises<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, kube_apis, crd_ingress_controller, virtual_server_setup):
wait_before_test(1)
text = f"{virtual_server_setup.namespace}/{virtual_server_setup.vs_name}"
vs_event_text = f"Configuration for {text} was added or updated"
events_vs = get_events(kube_apis.v1, virtual_server_setup.namespace)
initial_count = assert_event_and_get_count(vs_event_text, events_vs)
vs_src = f"{TEST_DATA}/virtual-server-canned-responses/virtual-server-updated.yaml"
patch_virtual_server_from_yaml(
kube_apis.custom_objects, virtual_server_setup.vs_name, vs_src, virtual_server_setup.namespace
)
wait_and_assert_status_code(501, virtual_server_setup.backend_1_url, virtual_server_setup.vs_host)
resp = requests.get(virtual_server_setup.backend_1_url, headers={"host": virtual_server_setup.vs_host})
resp_content = resp.content.decode("utf-8")
assert resp.headers["content-type"] == "some/type" and resp_content == "{}"
wait_and_assert_status_code(201, virtual_server_setup.backend_2_url, virtual_server_setup.vs_host)
resp = requests.get(virtual_server_setup.backend_2_url, headers={"host": virtual_server_setup.vs_host})
resp_content = resp.content.decode("utf-8")
assert resp.headers["content-type"] == "user-type" and resp_content == "line1\nline2"
vs_events = get_events(kube_apis.v1, virtual_server_setup.namespace)
assert_event_count_increased(vs_event_text, initial_count, vs_events)<|fim_middle|>test_update<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Allows to get u_svd output of the operator
Returns
----------
my_u_svd : FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> # Connect inputs : op.inputs. ...
>>> result_u_svd = op.outputs.u_svd()
""" # noqa: E501
return self._u_svd<|fim_middle|>u_svd<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Returns the end value of range (or None if empty)."""
if self.is_empty:
return None
return self.list[-1][1]<|fim_middle|>end<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
run_unittest(TracebackCases, TracebackFormatTests)<|fim_middle|>test_main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(error):
'''Returns the message to show for an error
@param error: error object or string
@returns: 2-tuple of: message string and a boolean
whether a traceback should be shown or not
'''
if isinstance(error, Error):
# An "expected" error
return error.msg, False
elif isinstance(error, EnvironmentError):
# Normal error, e.g. OSError or IOError
msg = error.strerror
if hasattr(error, 'filename') and error.filename:
msg += ': ' + error.filename
return msg, False
else:
# An unexpected error, all other Exception's
msg = _('Looks like you found a bug') # T: generic error dialog
return msg, True<|fim_middle|>get_error_msg<|file_separator|> |