// SPDX-License-Identifier: GPL-2.0
/ {
	testcase-data {
		overlay-node {

			/* test bus */
			unittest_test_bus: test-bus {
				compatible = "simple-bus";
				#address-cells = <1>;
				#size-cells = <0>;

				unittest100: test-unittest100 { compatible = "unittest"; status = "okay"; reg = <100>; };
				unittest101: test-unittest101 { compatible = "unittest"; status = "disabled"; reg = <101>; };
				unittest0: test-unittest0 { compatible = "unittest"; status = "disabled"; reg = <0>; };
				unittest1: test-unittest1 { compatible = "unittest"; status = "okay"; reg = <1>; };
				unittest2: test-unittest2 { compatible = "unittest"; status = "disabled"; reg = <2>; };
				unittest3: test-unittest3 { compatible = "unittest"; status = "okay"; reg = <3>; };
				unittest5: test-unittest5 { compatible = "unittest"; status = "disabled"; reg = <5>; };
				unittest6: test-unittest6 { compatible = "unittest"; status = "disabled"; reg = <6>; };
				unittest7: test-unittest7 { compatible = "unittest"; status = "disabled"; reg = <7>; };
				unittest8: test-unittest8 { compatible = "unittest"; status = "disabled"; reg = <8>; };

				unittest_i2c_test_bus: i2c-test-bus {
					compatible = "unittest-i2c-bus";
					status = "okay";
					reg = <50>;
					#address-cells = <1>;
					#size-cells = <0>;

					test-unittest12 { reg = <8>; compatible = "unittest-i2c-dev"; status = "disabled"; };
					test-unittest13 { reg = <9>; compatible = "unittest-i2c-dev"; status = "okay"; };

					test-unittest14 {
						reg = <10>;
						compatible = "unittest-i2c-mux";
						status = "okay";
						#address-cells = <1>;
						#size-cells = <0>;

						i2c@0 {
							#address-cells = <1>;
							#size-cells = <0>;
							reg = <0>;

							test-mux-dev@20 {
								reg = <0x20>;
								compatible = "unittest-i2c-dev";
								status = "okay";
							};
						};
					};
				};
			};
		};
	};
};
{ "pile_set_name": "Github" }
/* -*- mode: c; tab-width: 2; indent-tabs-mode: nil; -*-
Copyright (c) 2012 Marcus Geelnard

This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.

Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:

    1. The origin of this software must not be misrepresented; you must not
    claim that you wrote the original software. If you use this software
    in a product, an acknowledgment in the product documentation would be
    appreciated but is not required.

    2. Altered source versions must be plainly marked as such, and must not be
    misrepresented as being the original software.

    3. This notice may not be removed or altered from any source
    distribution.
*/

#ifndef _TINYCTHREAD_H_
#define _TINYCTHREAD_H_

/**
 * @file
 * @mainpage TinyCThread API Reference
 *
 * @section intro_sec Introduction
 * TinyCThread is a minimal, portable implementation of basic threading
 * classes for C.
 *
 * They closely mimic the functionality and naming of the C11 standard, and
 * should be easily replaceable with the corresponding standard variants.
 *
 * @section port_sec Portability
 * The Win32 variant uses the native Win32 API for implementing the thread
 * classes, while for other systems, the POSIX threads API (pthread) is used.
 *
 * @section misc_sec Miscellaneous
 * The following special keywords are available: #_Thread_local.
 *
 * For more detailed information, browse the different sections of this
 * documentation. A good place to start is:
 * tinycthread.h.
 */

/* Which platform are we on? */
#if !defined(_TTHREAD_PLATFORM_DEFINED_)
#if defined(_WIN32) || defined(__WIN32__) || defined(__WINDOWS__)
#define _TTHREAD_WIN32_
#else
#define _TTHREAD_POSIX_
#endif
#define _TTHREAD_PLATFORM_DEFINED_
#endif

/* Activate some POSIX functionality (e.g. clock_gettime and recursive mutexes) */
#if defined(_TTHREAD_POSIX_)
#undef _FEATURES_H
#if !defined(_GNU_SOURCE)
#define _GNU_SOURCE
#endif
#if !defined(_POSIX_C_SOURCE) || ((_POSIX_C_SOURCE - 0) < 199309L)
#undef _POSIX_C_SOURCE
#define _POSIX_C_SOURCE 199309L
#endif
#if !defined(_XOPEN_SOURCE) || ((_XOPEN_SOURCE - 0) < 500)
#undef _XOPEN_SOURCE
#define _XOPEN_SOURCE 500
#endif
#endif

/* Generic includes */
#include <time.h>

/* Platform specific includes */
#if defined(_TTHREAD_POSIX_)
#include <sys/time.h>
#include <pthread.h>
#elif defined(_TTHREAD_WIN32_)
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#define __UNDEF_LEAN_AND_MEAN
#endif
#include <windows.h>
#ifdef __UNDEF_LEAN_AND_MEAN
#undef WIN32_LEAN_AND_MEAN
#undef __UNDEF_LEAN_AND_MEAN
#endif
#endif

/* Workaround for missing TIME_UTC: If time.h doesn't provide TIME_UTC,
   it's quite likely that libc does not support it either. Hence, fall back to
   the only other supported time specifier: CLOCK_REALTIME (and if that fails,
   we're probably emulating clock_gettime anyway, so anything goes). */
#ifndef TIME_UTC
#ifdef CLOCK_REALTIME
#define TIME_UTC CLOCK_REALTIME
#else
#define TIME_UTC 0
#endif
#endif

/* Workaround for missing clock_gettime (most Windows compilers, afaik) */
#if defined(_TTHREAD_WIN32_) || defined(__APPLE_CC__)
#define _TTHREAD_EMULATE_CLOCK_GETTIME_
/* Emulate struct timespec */
#if defined(_TTHREAD_WIN32_)
struct _ttherad_timespec {
  time_t tv_sec;
  long   tv_nsec;
};
#define timespec _ttherad_timespec
#endif

/* Emulate clockid_t */
typedef int _tthread_clockid_t;
#define clockid_t _tthread_clockid_t

/* Emulate clock_gettime */
int _tthread_clock_gettime(clockid_t clk_id, struct timespec *ts);
#define clock_gettime _tthread_clock_gettime
#define CLOCK_REALTIME 0
#endif

/** TinyCThread version (major number). */
#define TINYCTHREAD_VERSION_MAJOR 1
/** TinyCThread version (minor number). */
#define TINYCTHREAD_VERSION_MINOR 1
/** TinyCThread version (full version). */
#define TINYCTHREAD_VERSION (TINYCTHREAD_VERSION_MAJOR * 100 + TINYCTHREAD_VERSION_MINOR)

/**
 * @def _Thread_local
 * Thread local storage keyword.
 * A variable that is declared with the @c _Thread_local keyword makes the
 * value of the variable local to each thread (known as thread-local storage,
 * or TLS). Example usage:
 * @code
 * // This variable is local to each thread.
 * _Thread_local int variable;
 * @endcode
 * @note The @c _Thread_local keyword is a macro that maps to the corresponding
 * compiler directive (e.g. @c __declspec(thread)).
 * @note This directive is currently not supported on Mac OS X (it will give
 * a compiler error), since compile-time TLS is not supported in the Mac OS X
 * executable format. Also, some older versions of MinGW (before GCC 4.x) do
 * not support this directive.
 * @hideinitializer
 */
/* FIXME: Check for a PROPER value of __STDC_VERSION__ to know if we have C11 */
#if !(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201102L)) && !defined(_Thread_local)
#if defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_CC) || defined(__IBMCPP__)
#define _Thread_local __thread
#else
#define _Thread_local __declspec(thread)
#endif
#endif

/* Macros */
#define TSS_DTOR_ITERATIONS 0

/* Function return values */
#define thrd_error    0 /**< The requested operation failed */
#define thrd_success  1 /**< The requested operation succeeded */
#define thrd_timeout  2 /**< The time specified in the call was reached without acquiring the requested resource */
#define thrd_busy     3 /**< The requested operation failed because a resource requested by a test and return function is already in use */
#define thrd_nomem    4 /**< The requested operation failed because it was unable to allocate memory */

/* Mutex types */
#define mtx_plain     1
#define mtx_timed     2
#define mtx_try       4
#define mtx_recursive 8

/* Mutex */
#if defined(_TTHREAD_WIN32_)
typedef struct {
  CRITICAL_SECTION mHandle;   /* Critical section handle */
  int mAlreadyLocked;         /* TRUE if the mutex is already locked */
  int mRecursive;             /* TRUE if the mutex is recursive */
} mtx_t;
#else
typedef pthread_mutex_t mtx_t;
#endif

/** Create a mutex object.
 * @param mtx A mutex object.
 * @param type Bit-mask that must have one of the following six values:
 *   @li @c mtx_plain for a simple non-recursive mutex
 *   @li @c mtx_timed for a non-recursive mutex that supports timeout
 *   @li @c mtx_try for a non-recursive mutex that supports test and return
 *   @li @c mtx_plain | @c mtx_recursive (same as @c mtx_plain, but recursive)
 *   @li @c mtx_timed | @c mtx_recursive (same as @c mtx_timed, but recursive)
 *   @li @c mtx_try | @c mtx_recursive (same as @c mtx_try, but recursive)
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int mtx_init(mtx_t *mtx, int type);

/** Release any resources used by the given mutex.
 * @param mtx A mutex object.
 */
void mtx_destroy(mtx_t *mtx);

/** Lock the given mutex.
 * Blocks until the given mutex can be locked. If the mutex is non-recursive, and
 * the calling thread already has a lock on the mutex, this call will block
 * forever.
 * @param mtx A mutex object.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int mtx_lock(mtx_t *mtx);

/** NOT YET IMPLEMENTED. */
int mtx_timedlock(mtx_t *mtx, const struct timespec *ts);

/** Try to lock the given mutex.
 * The specified mutex shall support either test and return or timeout. If the
 * mutex is already locked, the function returns without blocking.
 * @param mtx A mutex object.
 * @return @ref thrd_success on success, or @ref thrd_busy if the resource
 * requested is already in use, or @ref thrd_error if the request could not be
 * honored.
 */
int mtx_trylock(mtx_t *mtx);

/** Unlock the given mutex.
 * @param mtx A mutex object.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int mtx_unlock(mtx_t *mtx);

/* Condition variable */
#if defined(_TTHREAD_WIN32_)
typedef struct {
  HANDLE mEvents[2];                  /* Signal and broadcast event HANDLEs. */
  unsigned int mWaitersCount;         /* Count of the number of waiters. */
  CRITICAL_SECTION mWaitersCountLock; /* Serialize access to mWaitersCount. */
} cnd_t;
#else
typedef pthread_cond_t cnd_t;
#endif

/** Create a condition variable object.
 * @param cond A condition variable object.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int cnd_init(cnd_t *cond);

/** Release any resources used by the given condition variable.
 * @param cond A condition variable object.
 */
void cnd_destroy(cnd_t *cond);

/** Signal a condition variable.
 * Unblocks one of the threads that are blocked on the given condition variable
 * at the time of the call. If no threads are blocked on the condition variable
 * at the time of the call, the function does nothing and returns success.
 * @param cond A condition variable object.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int cnd_signal(cnd_t *cond);

/** Broadcast a condition variable.
 * Unblocks all of the threads that are blocked on the given condition variable
 * at the time of the call. If no threads are blocked on the condition variable
 * at the time of the call, the function does nothing and returns success.
 * @param cond A condition variable object.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int cnd_broadcast(cnd_t *cond);

/** Wait for a condition variable to become signaled.
 * The function atomically unlocks the given mutex and endeavors to block until
 * the given condition variable is signaled by a call to cnd_signal or to
 * cnd_broadcast. When the calling thread becomes unblocked it locks the mutex
 * before it returns.
 * @param cond A condition variable object.
 * @param mtx A mutex object.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int cnd_wait(cnd_t *cond, mtx_t *mtx);

/** Wait for a condition variable to become signaled.
 * The function atomically unlocks the given mutex and endeavors to block until
 * the given condition variable is signaled by a call to cnd_signal or to
 * cnd_broadcast, or until after the specified time. When the calling thread
 * becomes unblocked it locks the mutex before it returns.
 * @param cond A condition variable object.
 * @param mtx A mutex object.
 * @param ts A point in time at which the request will time out (absolute time).
 * @return @ref thrd_success upon success, or @ref thrd_timeout if the time
 * specified in the call was reached without acquiring the requested resource, or
 * @ref thrd_error if the request could not be honored.
 */
int cnd_timedwait(cnd_t *cond, mtx_t *mtx, const struct timespec *ts);

/* Thread */
#if defined(_TTHREAD_WIN32_)
typedef HANDLE thrd_t;
#else
typedef pthread_t thrd_t;
#endif

/** Thread start function.
 * Any thread that is started with the @ref thrd_create() function must be
 * started through a function of this type.
 * @param arg The thread argument (the @c arg argument of the corresponding
 * @ref thrd_create() call).
 * @return The thread return value, which can be obtained by another thread
 * by using the @ref thrd_join() function.
 */
typedef int (*thrd_start_t)(void *arg);

/** Create a new thread.
 * @param thr Identifier of the newly created thread.
 * @param func A function pointer to the function that will be executed in
 * the new thread.
 * @param arg An argument to the thread function.
 * @return @ref thrd_success on success, or @ref thrd_nomem if no memory could
 * be allocated for the thread requested, or @ref thrd_error if the request
 * could not be honored.
 * @note A thread’s identifier may be reused for a different thread once the
 * original thread has exited and either been detached or joined to another
 * thread.
 */
int thrd_create(thrd_t *thr, thrd_start_t func, void *arg);

/** Identify the calling thread.
 * @return The identifier of the calling thread.
 */
thrd_t thrd_current(void);

/** NOT YET IMPLEMENTED. */
int thrd_detach(thrd_t thr);

/** Compare two thread identifiers.
 * The function determines if two thread identifiers refer to the same thread.
 * @return Zero if the two thread identifiers refer to different threads.
 * Otherwise a nonzero value is returned.
 */
int thrd_equal(thrd_t thr0, thrd_t thr1);

/** Terminate execution of the calling thread.
 * @param res Result code of the calling thread.
 */
void thrd_exit(int res);

/** Wait for a thread to terminate.
 * The function joins the given thread with the current thread by blocking
 * until the other thread has terminated.
 * @param thr The thread to join with.
 * @param res If this pointer is not NULL, the function will store the result
 * code of the given thread in the integer pointed to by @c res.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int thrd_join(thrd_t thr, int *res);

/** Put the calling thread to sleep.
 * Suspend execution of the calling thread.
 * @param time_point A point in time at which the thread will resume (absolute time).
 * @param remaining If non-NULL, this parameter will hold the remaining time until
 * time_point upon return. This will typically be zero, but if
 * the thread was woken up by a signal that is not ignored before
 * time_point was reached @c remaining will hold a positive
 * time.
 * @return 0 (zero) on successful sleep, or -1 if an interrupt occurred.
 */
int thrd_sleep(const struct timespec *time_point, struct timespec *remaining);

/** Yield execution to another thread.
 * Permit other threads to run, even if the current thread would ordinarily
 * continue to run.
 */
void thrd_yield(void);

/* Thread local storage */
#if defined(_TTHREAD_WIN32_)
typedef DWORD tss_t;
#else
typedef pthread_key_t tss_t;
#endif

/** Destructor function for a thread-specific storage.
 * @param val The value of the destructed thread-specific storage.
 */
typedef void (*tss_dtor_t)(void *val);

/** Create a thread-specific storage.
 * @param key The unique key identifier that will be set if the function is
 * successful.
 * @param dtor Destructor function. This can be NULL.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 * @note The destructor function is not supported under Windows. If @c dtor is
 * not NULL when calling this function under Windows, the function will fail
 * and return @ref thrd_error.
 */
int tss_create(tss_t *key, tss_dtor_t dtor);

/** Delete a thread-specific storage.
 * The function releases any resources used by the given thread-specific
 * storage.
 * @param key The key that shall be deleted.
 */
void tss_delete(tss_t key);

/** Get the value for a thread-specific storage.
 * @param key The thread-specific storage identifier.
 * @return The value for the current thread held in the given thread-specific
 * storage.
 */
void *tss_get(tss_t key);

/** Set the value for a thread-specific storage.
 * @param key The thread-specific storage identifier.
 * @param val The value of the thread-specific storage to set for the current
 * thread.
 * @return @ref thrd_success on success, or @ref thrd_error if the request could
 * not be honored.
 */
int tss_set(tss_t key, void *val);

#endif /* _TINYCTHREAD_H_ */
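The header above documents TinyCThread's C11-style threads API but carries no usage example. The sketch below is an editorial illustration only, not part of TinyCThread: it assumes tinycthread.c is compiled and linked alongside, and the names worker, g_lock, g_done_cnd, g_counter and g_done are invented for the example. It shows a worker thread incrementing a mutex-protected counter and signalling completion through a condition variable.

/* Usage sketch for the API declared above (illustrative only; assumes
 * tinycthread.c is linked in). */
#include <stdio.h>
#include "tinycthread.h"

static mtx_t g_lock;        /* protects g_counter and g_done */
static cnd_t g_done_cnd;    /* signalled when the worker finishes */
static int g_counter = 0;
static int g_done = 0;

/* Thread start function; the signature must match thrd_start_t. */
static int worker(void *arg)
{
  int increments = *(int *)arg;
  int i;
  for (i = 0; i < increments; ++i) {
    mtx_lock(&g_lock);
    ++g_counter;
    mtx_unlock(&g_lock);
  }
  mtx_lock(&g_lock);
  g_done = 1;
  cnd_signal(&g_done_cnd);  /* wake the waiter in main() */
  mtx_unlock(&g_lock);
  return increments;        /* value retrieved via thrd_join() */
}

int main(void)
{
  thrd_t thr;
  int increments = 1000;
  int res = 0;

  mtx_init(&g_lock, mtx_plain);
  cnd_init(&g_done_cnd);

  if (thrd_create(&thr, worker, &increments) != thrd_success) {
    return 1;
  }

  /* Block until the worker signals completion. */
  mtx_lock(&g_lock);
  while (!g_done) {
    cnd_wait(&g_done_cnd, &g_lock);
  }
  mtx_unlock(&g_lock);

  thrd_join(thr, &res);
  printf("counter=%d, worker returned %d\n", g_counter, res);

  cnd_destroy(&g_done_cnd);
  mtx_destroy(&g_lock);
  return 0;
}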
{ "pile_set_name": "Github" }
import path from 'path'
import fs from 'fs'

import { PmtError } from './errors'
import { fileExists } from './shell'

export const translateDatasourceUrl = (url: string, cwd?: string): string => {
  if (url.startsWith('file:') && !url.startsWith('file:/')) {
    return 'file:' + path.join(cwd || process.cwd(), url.replace('file:', '')).replace(/\\/g, '/')
  }

  return url
}

export const getManagementEnv = async (): Promise<{ [name: string]: string }> => {
  if (!process.env.MANAGEMENT_URL) {
    throw new PmtError('missing-env', { name: 'MANAGEMENT_URL' })
  }

  const managementUrl = translateDatasourceUrl(process.env.MANAGEMENT_URL)

  return {
    PMT_MANAGEMENT_URL: managementUrl,
    PMT_OUTPUT: 'PMT_TMP',
  }
}

export const setManagementEnv = async (): Promise<void> => {
  const managementEnv = await getManagementEnv()

  Object.entries(managementEnv).forEach(([key, value]) => (process.env[key] = value))
}

export const envPaths = [
  'prisma/.env',
  'db/.env', // Blitz
  '../.env.defaults', // Redwood
  '.env',
]

export const getEnvPath = async (schemaPath?: string): Promise<string> => {
  if (schemaPath) {
    const envPath = path.join(path.dirname(schemaPath), '.env')
    if (await fileExists(envPath)) {
      return envPath
    }
  }

  for (const envPath of envPaths) {
    if (await fileExists(envPath)) {
      return envPath
    }
  }

  throw new Error("Couldn't find the prisma/.env file")
}

export const readEnvFile = async (schemaPath?: string): Promise<string> => {
  const path = await getEnvPath(schemaPath)
  return fs.promises.readFile(path, 'utf-8')
}

export const writeEnvFile = async (content: string, schemaPath?: string): Promise<void> => {
  let path
  try {
    path = await getEnvPath(schemaPath)
  } catch {
    // Can't get path? Then we force write it to prisma/.env
    path = 'prisma/.env'
  }
  return fs.promises.writeFile(path, content)
}

export const schemaPaths = [
  'prisma/schema.prisma',
  'db/schema.prisma', // Blitz
  'api/prisma/schema.prisma', // Redwood
  'schema.prisma',
]

export const getSchemaPath = async (): Promise<string> => {
  for (const schemaPath of schemaPaths) {
    if (await fileExists(schemaPath)) {
      return schemaPath
    }
  }

  throw new Error("Couldn't find the schema file")
}

export const readSchemaFile = async (schemaPath?: string): Promise<string> => {
  const path = schemaPath || (await getSchemaPath())
  return fs.promises.readFile(path, 'utf-8')
}

export const writeSchemaFile = async (content: string, schemaPath?: string): Promise<void> => {
  const path = schemaPath || (await getSchemaPath())
  return fs.promises.writeFile(path, content, 'utf-8')
}
{ "pile_set_name": "Github" }
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.client.hadoop;

import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.hadoop.FileSystem;
import alluxio.hadoop.HadoopConfigurationUtils;
import alluxio.security.authentication.AuthType;
import alluxio.testutils.LocalAlluxioClusterResource;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;

import java.net.URI;

/**
 * Integration tests for {@link FileSystem}.
 */
public class FileSystemIntegrationTest {
  @ClassRule
  public static LocalAlluxioClusterResource sLocalAlluxioClusterResource =
      new LocalAlluxioClusterResource.Builder()
          .setProperty(PropertyKey.SECURITY_AUTHENTICATION_TYPE, AuthType.SIMPLE.getAuthName())
          .setProperty(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_ENABLED, "true")
          .build();

  private static org.apache.hadoop.fs.FileSystem sTFS;

  @BeforeClass
  public static void beforeClass() throws Exception {
    Configuration conf = new Configuration();
    conf.set("fs.alluxio.impl", FileSystem.class.getName());

    URI uri = URI.create(sLocalAlluxioClusterResource.get().getMasterURI());

    sTFS = org.apache.hadoop.fs.FileSystem.get(uri,
        HadoopConfigurationUtils.mergeAlluxioConfiguration(conf, ServerConfiguration.global()));
  }

  @Test
  public void closeFileSystem() throws Exception {
    Path file = new Path("/createfile");
    FsPermission permission = FsPermission.createImmutable((short) 0666);
    FSDataOutputStream o = sTFS.create(file, permission, false /* ignored */, 10 /* ignored */,
        (short) 1 /* ignored */, 512 /* ignored */, null /* ignored */);
    o.writeBytes("Test Bytes");
    o.close();
    // with the delete-on-exit mark set, the close method will try to delete it
    sTFS.deleteOnExit(file);
    sTFS.close();
  }
}
{ "pile_set_name": "Github" }
\begin{tikzpicture}
\begin{scope}
\tikzstyle{lnode} = [minimum height=1.5em,minimum width=3em,inner sep=3pt,rounded corners=1.5pt,draw,fill=orange!20];
\tikzstyle{standard} = [rounded corners=3pt]

\node [lnode,anchor=west] (l1) at (0,0) {\scriptsize{Sublayer $n$}};
\node [lnode,anchor=west] (l2) at ([xshift=3em]l1.east) {\scriptsize{Layer normalization}};
\node [lnode,anchor=west] (l3) at ([xshift=4em]l2.east) {\scriptsize{Layer normalization}};
\node [lnode,anchor=west] (l4) at ([xshift=1.5em]l3.east) {\scriptsize{Sublayer $n$}};

\node [anchor=west] (plus1) at ([xshift=0.9em]l1.east) {\scriptsize{$\mathbf{\oplus}$}};
\node [anchor=west] (plus2) at ([xshift=0.9em]l4.east) {\scriptsize{$\mathbf{\oplus}$}};

\node [anchor=north] (label1) at ([xshift=3em,yshift=-0.5em]l1.south) {\scriptsize{(a) Post-normalization}};
\node [anchor=north] (label2) at ([xshift=3em,yshift=-0.5em]l3.south) {\scriptsize{(b) Pre-normalization}};

\draw [->,thick] ([xshift=-1.5em]l1.west) -- ([xshift=-0.1em]l1.west);
\draw [->,thick] ([xshift=0.1em]l1.east) -- ([xshift=0.2em]plus1.west);
\draw [->,thick] ([xshift=-0.2em]plus1.east) -- ([xshift=-0.1em]l2.west);
\draw [->,thick] ([xshift=0.1em]l2.east) -- ([xshift=1em]l2.east);

\draw [->,thick] ([xshift=-1.5em]l3.west) -- ([xshift=-0.1em]l3.west);
\draw [->,thick] ([xshift=0.1em]l3.east) -- ([xshift=-0.1em]l4.west);
\draw [->,thick] ([xshift=0.1em]l4.east) -- ([xshift=0.2em]plus2.west);
\draw [->,thick] ([xshift=-0.2em]plus2.east) -- ([xshift=1em]plus2.east);

\draw[->,standard,thick] ([xshift=-0.8em]l1.west) -- ([xshift=-0.8em,yshift=2em]l1.west) -- ([yshift=2em]plus1.center) -- ([yshift=-0.2em]plus1.north);
\draw[->,standard,thick] ([xshift=-0.8em]l3.west) -- ([xshift=-0.8em,yshift=2em]l3.west) -- ([yshift=2em]plus2.center) -- ([yshift=-0.2em]plus2.north);
\end{scope}
\end{tikzpicture}
{ "pile_set_name": "Github" }
/*
 * Copyright (c) Minh Loi.
 *
 * This file is part of Ulangi which is released under GPL v3.0.
 * See LICENSE or go to https://www.gnu.org/licenses/gpl-3.0.txt
 */

import { assertExists } from '@ulangi/assert';

import { loadConfig } from '../../src/setup/loadConfig';
import { resolveEnv } from '../../src/setup/resolveEnv';
import { GoogleTextToSpeechAdapter } from './GoogleTextToSpeechAdapter';

describe('GoogleTextToSpeechAdapterTest', (): void => {
  const config = loadConfig();
  const env = resolveEnv();

  let googleTextToSpeechAdapter: GoogleTextToSpeechAdapter;

  beforeEach(
    (): void => {
      googleTextToSpeechAdapter = new GoogleTextToSpeechAdapter(
        assertExists(env.GOOGLE_CLOUD_PROJECT_ID),
        assertExists(env.GOOGLE_CLOUD_SERVICE_ACCOUNT)
      );
    }
  );

  it(`should synthesize speech by voiceName successfully`, async (): Promise<void> => {
    const data = await googleTextToSpeechAdapter.synthesizeSpeechByLanguageCodeAndVoiceName(
      'test',
      config.googleTextToSpeech.defaultVoices['ja'].languageCode,
      config.googleTextToSpeech.defaultVoices['ja'].voiceName
    );

    expect(data[0].audioContent).toBeDefined();
  });
});
{ "pile_set_name": "Github" }
VictoryRoad_2F_MapScripts:: @ 8160F92
	map_script MAP_SCRIPT_ON_LOAD, VictoryRoad_2F_OnLoad
	.byte 0

VictoryRoad_2F_OnLoad:: @ 8160F98
	compare VAR_MAP_SCENE_VICTORY_ROAD_2F_BOULDER1, 100
	call_if_ne VictoryRoad_2F_EventScript_SetRockBarrier1
	compare VAR_MAP_SCENE_VICTORY_ROAD_2F_BOULDER2, 100
	call_if_ne VictoryRoad_2F_EventScript_SetRockBarrier2
	end

VictoryRoad_2F_EventScript_SetRockBarrier1:: @ 8160FAF
	setmetatile 13, 10, METATILE_Cave_RockBarrier_Top, 1
	setmetatile 13, 11, METATILE_Cave_RockBarrier_Bottom, 1
	return

VictoryRoad_2F_EventScript_SetRockBarrier2:: @ 8160FC2
	setmetatile 33, 16, METATILE_Cave_RockBarrier_Top, 1
	setmetatile 33, 17, METATILE_Cave_RockBarrier_Bottom, 1
	return

VictoryRoad_2F_EventScript_FloorSwitch1:: @ 8160FD5
	lockall
	compare VAR_MAP_SCENE_VICTORY_ROAD_2F_BOULDER1, 100
	goto_if_eq VictoryRoad_2F_EventScript_FloorSwitch1AlreadyPressed
	setmetatile 13, 10, METATILE_Cave_Floor_Ledge_Top, 0
	setmetatile 13, 11, METATILE_Cave_Floor_Ledge_Bottom, 0
	playse SE_ICE_BREAK
	special DrawWholeMapView
	waitse
	moveobjectoffscreen 11
	setvar VAR_MAP_SCENE_VICTORY_ROAD_2F_BOULDER1, 100
	releaseall
	end

VictoryRoad_2F_EventScript_FloorSwitch1AlreadyPressed:: @ 8161004
	releaseall
	end

VictoryRoad_2F_EventScript_FloorSwitch2:: @ 8161006
	lockall
	compare VAR_MAP_SCENE_VICTORY_ROAD_2F_BOULDER2, 100
	goto_if_eq VictoryRoad_2F_EventScript_FloorSwitch2AlreadyPressed
	setmetatile 33, 16, METATILE_Cave_Floor_Ledge_Top, 0
	setmetatile 33, 17, METATILE_Cave_Floor_Ledge_Bottom, 0
	playse SE_ICE_BREAK
	special DrawWholeMapView
	waitse
	moveobjectoffscreen 12
	setvar VAR_MAP_SCENE_VICTORY_ROAD_2F_BOULDER2, 100
	releaseall
	end

VictoryRoad_2F_EventScript_FloorSwitch2AlreadyPressed:: @ 8161035
	releaseall
	end

VictoryRoad_2F_EventScript_Dawson:: @ 8161037
	trainerbattle_single TRAINER_POKEMANIAC_DAWSON, VictoryRoad_2F_Text_DawsonIntro, VictoryRoad_2F_Text_DawsonDefeat
	msgbox VictoryRoad_2F_Text_DawsonPostBattle, MSGBOX_AUTOCLOSE
	end

VictoryRoad_2F_EventScript_Daisuke:: @ 816104E
	trainerbattle_single TRAINER_BLACK_BELT_DAISUKE, VictoryRoad_2F_Text_DaisukeIntro, VictoryRoad_2F_Text_DaisukeDefeat
	msgbox VictoryRoad_2F_Text_DaisukePostBattle, MSGBOX_AUTOCLOSE
	end

VictoryRoad_2F_EventScript_Nelson:: @ 8161065
	trainerbattle_single TRAINER_JUGGLER_NELSON, VictoryRoad_2F_Text_NelsonIntro, VictoryRoad_2F_Text_NelsonDefeat
	msgbox VictoryRoad_2F_Text_NelsonPostBattle, MSGBOX_AUTOCLOSE
	end

VictoryRoad_2F_EventScript_Gregory:: @ 816107C
	trainerbattle_single TRAINER_JUGGLER_GREGORY, VictoryRoad_2F_Text_GregoryIntro, VictoryRoad_2F_Text_GregoryDefeat
	msgbox VictoryRoad_2F_Text_GregoryPostBattle, MSGBOX_AUTOCLOSE
	end

VictoryRoad_2F_EventScript_Vincent:: @ 8161093
	trainerbattle_single TRAINER_TAMER_VINCENT, VictoryRoad_2F_Text_VincentIntro, VictoryRoad_2F_Text_VincentDefeat
	msgbox VictoryRoad_2F_Text_VincentPostBattle, MSGBOX_AUTOCLOSE
	end
{ "pile_set_name": "Github" }
/* +----------------------------------------------------------------------+ | HipHop for PHP | +----------------------------------------------------------------------+ | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) | | Copyright (c) 1997-2010 The PHP Group | +----------------------------------------------------------------------+ | This source file is subject to version 3.01 of the PHP license, | | that is bundled with this package in the file LICENSE, and is | | available through the world-wide-web at the following url: | | http://www.php.net/license/3_01.txt | | If you did not receive a copy of the PHP license and are unable to | | obtain it through the world-wide-web, please send a note to | | [email protected] so we can mail you a copy immediately. | +----------------------------------------------------------------------+ */ #include "hphp/runtime/base/array-init.h" #include "hphp/runtime/ext/icu/ext_icu_calendar.h" #include "hphp/runtime/ext/icu/ext_icu_timezone.h" #include "hphp/runtime/ext/icu/ext_icu_iterator.h" namespace HPHP { namespace Intl { ///////////////////////////////////////////////////////////////////////////// const StaticString s_IntlCalendar("IntlCalendar"), s_IntlGregorianCalendar("IntlGregorianCalendar"); Class* IntlCalendar::c_IntlCalendar = nullptr; const icu::Calendar* IntlCalendar::ParseArg(const Variant& cal, const icu::Locale &locale, const String &funcname, IntlError* err, int64_t &calType, bool &calOwned) { icu::Calendar *ret = nullptr; UErrorCode error = U_ZERO_ERROR; if (cal.isNull()) { ret = new icu::GregorianCalendar(locale, error); calType = UCAL_GREGORIAN; calOwned = true; } else if (cal.isInteger()) { calType = cal.toInt64(); if (calType != UCAL_GREGORIAN && calType != UCAL_TRADITIONAL) { err->setError(U_ILLEGAL_ARGUMENT_ERROR, "%s: invalid value for calendar type; it must be " "one of IntlDateFormatter::TRADITIONAL (locale's default " "calendar) or IntlDateFormatter::GREGORIAN. " "Alternatively, it can be an IntlCalendar object", funcname.c_str()); return nullptr; } ret = (calType == UCAL_TRADITIONAL) ? 
(icu::Calendar::createInstance(locale, error)) : (new icu::GregorianCalendar(locale, error)); calOwned = true; } else if (cal.isObject()) { auto IntlCalendar_Class = Class::lookup(s_IntlCalendar.get()); auto obj = cal.toObject(); auto cls = obj->getVMClass(); if (!IntlCalendar_Class || ((cls != IntlCalendar_Class) && !cls->classof(IntlCalendar_Class))) { goto bad_argument; } auto data = IntlCalendar::Get(obj.get()); if (!data) { // ::Get raises errors return nullptr; } calOwned = false; return data->calendar(); } else { bad_argument: err->setError(U_ILLEGAL_ARGUMENT_ERROR, "%s: Invalid calendar argument; should be an integer " "or an IntlCalendar instance", funcname.c_str()); return nullptr; } if (ret) { if (U_SUCCESS(error)) { return ret; } delete ret; } if (!U_FAILURE(error)) { error = U_MEMORY_ALLOCATION_ERROR; } err->setError(error, "%s: Failure instantiating calendar", funcname.c_str()); return nullptr; } ///////////////////////////////////////////////////////////////////////////// // Methods #define CAL_FETCH(dest, src, def) \ auto dest = IntlCalendar::Get(src); \ if (!dest) { \ return def; \ } #define CAL_CHECK_FIELD(field, func) \ if ((field < 0) || (field > UCAL_FIELD_COUNT)) { \ data->setError(U_ILLEGAL_ARGUMENT_ERROR, \ "%s: invalid field", func); \ return false; \ } #define GCAL_FETCH(dest, src, def) \ auto dest = IntlCalendar::Get(src); \ if (!dest || !dest->gcal()) { \ return def; \ } static bool HHVM_METHOD(IntlCalendar, add, int64_t field, int64_t amount) { CAL_FETCH(data, this_, false); CAL_CHECK_FIELD(field, "intlcal_add"); if ((amount < INT32_MIN) || (amount > INT32_MAX)) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_add: amount out of bounds"); return false; } UErrorCode error = U_ZERO_ERROR; data->calendar()->add((UCalendarDateFields)field, (int32_t)amount, error); if (U_FAILURE(error)) { data->setError(error, "intlcal_add: Call to underlying method failed"); return false; } return true; } static bool intlcal_compare(ObjectData* this_, const Object& that_, UBool (icu::Calendar::*func)(const icu::Calendar&, UErrorCode&) const) { CAL_FETCH(obj1, this_, false); CAL_FETCH(obj2, that_.get(), false); UErrorCode error = U_ZERO_ERROR; UBool res = (obj1->calendar()->*func)(*obj2->calendar(), error); if (U_FAILURE(error)) { obj1->setError(error, "intlcal_before/after: Error calling ICU method"); return false; } return res; } static bool HHVM_METHOD(IntlCalendar, after, const Object& other) { return intlcal_compare(this_, other, &icu::Calendar::after); } static bool HHVM_METHOD(IntlCalendar, before, const Object& other) { return intlcal_compare(this_, other, &icu::Calendar::before); } static bool HHVM_METHOD(IntlCalendar, clear, const Variant& field) { CAL_FETCH(data, this_, false); if (field.isNull()) { data->calendar()->clear(); } else { data->calendar()->clear((UCalendarDateFields)field.toInt64()); } return true; } static Object HHVM_STATIC_METHOD(IntlCalendar, createInstance, const Variant& timeZone, const String& locale) { icu::TimeZone *tz = IntlTimeZone::ParseArg(timeZone, "intlcal_create_instance", s_intl_error.get()); if (!tz) { return Object(); } String loc = localeOrDefault(locale); UErrorCode error = U_ZERO_ERROR; icu::Calendar *cal = icu::Calendar::createInstance(tz, icu::Locale::createFromName(loc.c_str()), error); if (!cal) { delete tz; s_intl_error->setError(error, "Error creating ICU Calendar object"); return Object(); } return IntlCalendar::newInstance(cal); } static bool HHVM_METHOD(IntlCalendar, equals, const Object& other) { return 
intlcal_compare(this_, other, &icu::Calendar::equals); } static Variant HHVM_METHOD(IntlCalendar, fieldDifference, const Variant& when, int64_t field) { CAL_FETCH(data, this_, false); CAL_CHECK_FIELD(field, "intlcal_field_difference"); UErrorCode error = U_ZERO_ERROR; int64_t ret = data->calendar()->fieldDifference( (UDate)when.toDouble(), (UCalendarDateFields)field, error); if (U_FAILURE(error)) { data->setError(error, "intlcal_field_difference: " "Call to ICU method has failed"); return false; } return ret; } static Variant intlcal_field_method(const Object& obj, int64_t field, int32_t (icu::Calendar::*func)(UCalendarDateFields, UErrorCode&) const, const char *method_name) { CAL_FETCH(data, obj.get(), false); CAL_CHECK_FIELD(field, method_name); UErrorCode error = U_ZERO_ERROR; int64_t ret = (data->calendar()->*func)((UCalendarDateFields)field, error); if (U_FAILURE(error)) { data->setError(error, "Call to ICU method has failed"); return false; } return ret; } static Variant intlcal_field_method(const Object& obj, int64_t field, int32_t (icu::Calendar::*func)(UCalendarDateFields) const, const char *method_name) { CAL_FETCH(data, obj.get(), false); CAL_CHECK_FIELD(field, method_name); return (data->calendar()->*func)((UCalendarDateFields)field); } static Variant HHVM_METHOD(IntlCalendar, get, int64_t field) { return intlcal_field_method(Object{this_}, field, &icu::Calendar::get, "intlcal_get"); } static Variant HHVM_METHOD(IntlCalendar, getActualMaximum, int64_t field) { return intlcal_field_method(Object{this_}, field, &icu::Calendar::getActualMaximum, "intlcal_get_actual_maximum"); } static Variant HHVM_METHOD(IntlCalendar, getActualMinimum, int64_t field) { return intlcal_field_method(Object{this_}, field, &icu::Calendar::getActualMinimum, "intlcal_get_actual_minimum"); } static Array HHVM_STATIC_METHOD(IntlCalendar, getAvailableLocales) { int32_t count; const icu::Locale *availLocales = icu::Calendar::getAvailableLocales(count); VArrayInit ret(count); for (int i = 0; i < count; ++i) { ret.append(String(availLocales[i].getName(), CopyString)); } return ret.toArray(); } static int64_t HHVM_METHOD(IntlCalendar, getErrorCode) { CAL_FETCH(data, this_, 0); return data->getErrorCode(); } static String HHVM_METHOD(IntlCalendar, getErrorMessage) { CAL_FETCH(data, this_, String()); return data->getErrorMessage(); } static Variant HHVM_METHOD(IntlCalendar, getFirstDayOfWeek) { CAL_FETCH(data, this_, false); UErrorCode error = U_ZERO_ERROR; int64_t ret = data->calendar()->getFirstDayOfWeek(error); if (U_FAILURE(error)) { data->setError(error, "Call to ICU method has failed"); return false; } return ret; } static Variant HHVM_METHOD(IntlCalendar, getGreatestMinimum, int64_t field) { return intlcal_field_method(Object{this_}, field, &icu::Calendar::getGreatestMinimum, "intlcal_get_greatest_minimum"); } static Variant HHVM_METHOD(IntlCalendar, getLeastMaximum, int64_t field) { return intlcal_field_method(Object{this_}, field, &icu::Calendar::getLeastMaximum, "intlcal_get_least_maximum"); } static Variant HHVM_METHOD(IntlCalendar, getLocale, int64_t localeType) { CAL_FETCH(data, this_, false); if (localeType != ULOC_ACTUAL_LOCALE && localeType != ULOC_VALID_LOCALE) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_get_locale: invalid locale type"); return false; } UErrorCode error = U_ZERO_ERROR; icu::Locale locale = data->calendar()->getLocale((ULocDataLocaleType)localeType, error); if (U_FAILURE(error)) { data->setError(error, "intlcal_get_locale: Call to ICU method has failed"); return false; } 
return String(locale.getName(), CopyString); } static Variant HHVM_METHOD(IntlCalendar, getMaximum, int64_t field) { return intlcal_field_method(Object{this_}, field, &icu::Calendar::getMaximum, "intlcal_get_maximum"); } static Variant HHVM_METHOD(IntlCalendar, getMinimalDaysInFirstWeek) { CAL_FETCH(data, this_, false); uint64_t ret = data->calendar()->getMinimalDaysInFirstWeek(); return ret; } static Variant HHVM_METHOD(IntlCalendar, getMinimum, int64_t field) { return intlcal_field_method(Object{this_}, field, &icu::Calendar::getMinimum, "intlcal_get_maximum"); } static double HHVM_STATIC_METHOD(IntlCalendar, getNow) { return icu::Calendar::getNow(); } static Variant HHVM_METHOD(IntlCalendar, getTime) { CAL_FETCH(data, this_, false); UErrorCode error = U_ZERO_ERROR; UDate ret = data->calendar()->getTime(error); if (U_FAILURE(error)) { data->setError(error, "intlcal_get_time: error calling " "ICU Calendar::getTime"); return false; } return (double)ret; } static Object HHVM_METHOD(IntlCalendar, getTimeZone) { CAL_FETCH(data, this_, Object()); return IntlTimeZone::newInstance( data->calendar()->getTimeZone().clone()); } static Variant HHVM_METHOD(IntlCalendar, getType) { CAL_FETCH(data, this_, false); return String(data->calendar()->getType(), CopyString); } static bool HHVM_METHOD(IntlCalendar, inDaylightTime) { CAL_FETCH(data, this_, false); UErrorCode error = U_ZERO_ERROR; UBool ret = data->calendar()->inDaylightTime(error); if (U_FAILURE(error)) { data->setError(error, "intlcal_in_daylight_time: " "Error calling ICU method"); return false; } return ret; } static bool HHVM_METHOD(IntlCalendar, isEquivalentTo, const Object& other) { CAL_FETCH(obj1, this_, false); CAL_FETCH(obj2, other.get(), false); return obj1->calendar()->isEquivalentTo(*obj2->calendar()); } static bool HHVM_METHOD(IntlCalendar, isLenient) { CAL_FETCH(data, this_, false); return data->calendar()->isLenient(); } static bool HHVM_METHOD(IntlCalendar, isSet, int64_t field) { CAL_FETCH(data, this_, false); CAL_CHECK_FIELD(field, "intlcal_is_set"); return data->calendar()->isSet((UCalendarDateFields)field); } static bool HHVM_METHOD(IntlCalendar, roll, int64_t field, const Variant& value) { CAL_FETCH(data, this_, false); CAL_CHECK_FIELD(field, "intlcal_roll"); UErrorCode error = U_ZERO_ERROR; if (value.isBoolean()) { data->calendar()->roll((UCalendarDateFields)field, (UBool)value.toBoolean(), error); } else { data->calendar()->roll((UCalendarDateFields)field, (int32_t)value.toInt64(), error); } if (U_FAILURE(error)) { data->setError(error, "intlcal_roll: Error calling ICU Calendar::roll"); return false; } return true; } static bool HHVM_METHOD(IntlCalendar, set, int64_t yearOrField, int64_t monthOrValue, const Variant& dayOfMonth, const Variant& hour, const Variant& minute, const Variant& second) { CAL_FETCH(data, this_, false); if (dayOfMonth.isNull()) { // we only got 2 args, so the first 2 args are a field and a value CAL_CHECK_FIELD(yearOrField, "intcal_set"); data->calendar()->set((UCalendarDateFields)yearOrField, (int32_t)monthOrValue); return true; } auto outofrange = [&]() { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_set: at least one of the arguments has an " "absolute value that is too large"); return false; }; if ((yearOrField < INT32_MIN) || (yearOrField > INT32_MAX)) { return outofrange(); } auto y = (int32_t)yearOrField; if ((monthOrValue < INT32_MIN) || (monthOrValue > INT32_MAX)) { return outofrange(); } auto m = (int32_t)monthOrValue; assertx(dayOfMonth.isInteger()); if ((dayOfMonth.asInt64Val() < 
INT32_MIN) || (dayOfMonth.asInt64Val() > INT32_MAX)) { return outofrange(); } auto d = (int32_t)dayOfMonth.asInt64Val(); if (hour.isNull()) { data->calendar()->set(y, m, d); return true; } assertx(hour.isInteger()); if ((hour.asInt64Val() < INT32_MIN) || (hour.asInt64Val() > INT32_MAX)) { return outofrange(); } auto h = (int32_t)hour.asInt64Val(); if (minute.isNull()) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_set: bad arguments"); return false; } assertx(minute.isInteger()); if ((minute.asInt64Val() < INT32_MIN) || (minute.asInt64Val() > INT32_MAX)) { return outofrange(); } auto i = (int32_t)minute.asInt64Val(); if (second.isNull()) { data->calendar()->set(y, m, d, h, i); return true; } assertx(second.isInteger()); if ((second.asInt64Val() < INT32_MIN) || (second.asInt64Val() > INT32_MAX)) { return outofrange(); } auto s = (int32_t)second.asInt64Val(); data->calendar()->set(y, m, d, h, i, s); return true; } static bool HHVM_METHOD(IntlCalendar, setFirstDayOfWeek, int64_t dow) { CAL_FETCH(data, this_, false); if ((dow < UCAL_SUNDAY) || (dow > UCAL_SATURDAY)) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_set_first_day_of_week: invalid day of week"); return false; } data->calendar()->setFirstDayOfWeek((UCalendarDaysOfWeek)dow); return true; } static bool HHVM_METHOD(IntlCalendar, setLenient, bool isLenient) { CAL_FETCH(data, this_, false); data->calendar()->setLenient((UBool)isLenient); return true; } static bool HHVM_METHOD(IntlCalendar, setMinimalDaysInFirstWeek, int64_t days) { CAL_FETCH(data, this_, false); if ((days < 1) || (days > 7)) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_set_minimal_days_in_first_week: " "invalid number of days; must be between 1 and 7"); return false; } data->calendar()->setMinimalDaysInFirstWeek((uint8_t)days); return true; } static bool HHVM_METHOD(IntlCalendar, setTime, const Variant& date) { CAL_FETCH(data, this_, false); UErrorCode error = U_ZERO_ERROR; data->calendar()->setTime((UDate)date.toDouble(), error); if (U_FAILURE(error)) { data->setError(error, "Call to underlying method failed"); return false; } return true; } static bool HHVM_METHOD(IntlCalendar, setTimeZone, const Variant& timeZone) { CAL_FETCH(data, this_, false); auto tz = IntlTimeZone::ParseArg(timeZone, "intlcal_set_time_zone", data); if (!tz) { // error already set return false; } data->calendar()->adoptTimeZone(tz); return true; } #if ((U_ICU_VERSION_MAJOR_NUM * 100) + U_ICU_VERSION_MINOR_NUM) >= 402 static Variant HHVM_STATIC_METHOD(IntlCalendar, getKeywordValuesForLocale, const String& key, const String& locale, bool common) { UErrorCode error = U_ZERO_ERROR; UEnumeration *uenum = ucal_getKeywordValuesForLocale(key.c_str(), localeOrDefault(locale).c_str(), common, &error); if (U_FAILURE(error)) { if (uenum) { uenum_close(uenum); } s_intl_error->setError(error, "intlcal_get_keyword_values_for_locale: " "error calling underlying method"); return false; } return IntlIterator::newInstance(new BugStringCharEnumeration(uenum)); } #endif // ICU 4.2 #if ((U_ICU_VERSION_MAJOR_NUM * 100) + U_ICU_VERSION_MINOR_NUM) >= 404 static Variant HHVM_METHOD(IntlCalendar, getDayOfWeekType, int64_t dow) { CAL_FETCH(data, this_, false); if ((dow < UCAL_SUNDAY) || (dow > UCAL_SATURDAY)) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_get_day_of_week_type: invalid day of week"); return false; } UErrorCode error = U_ZERO_ERROR; int64_t ret = data->calendar()->getDayOfWeekType( (UCalendarDaysOfWeek)dow, error); if (U_FAILURE(error)) { data->setError(error, 
"intlcal_get_day_of_week_type: " "Call to ICU method has failed"); return false; } return ret; } static Variant HHVM_METHOD(IntlCalendar, getWeekendTransition, int64_t dow) { CAL_FETCH(data, this_, false); if ((dow < UCAL_SUNDAY) || (dow > UCAL_SATURDAY)) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_get_weekend_transition: " "invalid day of week"); return false; } UErrorCode error = U_ZERO_ERROR; int64_t ret = data->calendar()->getWeekendTransition((UCalendarDaysOfWeek)dow, error); if (U_FAILURE(error)) { data->setError(error, "intlcal_get_weekend_transition: " "Error calling ICU method"); return false; } return ret; } static bool HHVM_METHOD(IntlCalendar, isWeekend, const Variant& date) { CAL_FETCH(data, this_, false); if (date.isNull()) { return data->calendar()->isWeekend(); } UErrorCode error = U_ZERO_ERROR; bool ret = data->calendar()->isWeekend((UDate)date.toDouble(), error); if (U_FAILURE(error)) { data->setError(error, "intlcal_is_weekend: Error calling ICU method"); return false; } return ret; } #endif // ICU 4.4 #if ((U_ICU_VERSION_MAJOR_NUM * 100) + U_ICU_VERSION_MINOR_NUM) >= 409 static int64_t HHVM_METHOD(IntlCalendar, getRepeatedWallTimeOption) { CAL_FETCH(data, this_, 0); return (int64_t)data->calendar()->getRepeatedWallTimeOption(); } static int64_t HHVM_METHOD(IntlCalendar, getSkippedWallTimeOption) { CAL_FETCH(data, this_, 0); return (int64_t)data->calendar()->getSkippedWallTimeOption(); } static bool HHVM_METHOD(IntlCalendar, setRepeatedWallTimeOption, int64_t option) { CAL_FETCH(data, this_, false); if ((option != UCAL_WALLTIME_FIRST) && (option != UCAL_WALLTIME_LAST)) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_set_repeated_wall_time_option: invalid option"); return false; } data->calendar()->setRepeatedWallTimeOption((UCalendarWallTimeOption)option); return true; } static bool HHVM_METHOD(IntlCalendar, setSkippedWallTimeOption, int64_t option) { CAL_FETCH(data, this_, false); if ((option != UCAL_WALLTIME_FIRST) && (option != UCAL_WALLTIME_LAST)) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlcal_set_repeated_wall_time_option: invalid option"); return false; } data->calendar()->setSkippedWallTimeOption((UCalendarWallTimeOption)option); return true; } #endif // ICU 4.9 ///////////////////////////////////////////////////////////////////////////// // IntlGregorianCalendar static void HHVM_METHOD(IntlGregorianCalendar, __construct, const Variant& yearOrTz, const Variant& monthOrLocale, const Variant& day, const Variant& hour, const Variant& minute, const Variant& second) { icu::GregorianCalendar *gcal = nullptr; SCOPE_EXIT { if (gcal) { delete gcal; } }; icu::TimeZone *tz = nullptr; SCOPE_EXIT { if (tz) { delete tz; } }; auto success = [&]() { assertx(gcal); // tz should be owned by gcal, so we should have cleared our local copy assertx(!tz); Native::data<IntlCalendar>(this_)->setCalendar(gcal); gcal = nullptr; }; if (day.isNull()) { // we have 2 args, tz and locale tz = IntlTimeZone::ParseArg(yearOrTz, "intlgregcal_create_instance", s_intl_error.get()); auto error = U_ZERO_ERROR; gcal = new icu::GregorianCalendar( tz, icu::Locale::createFromName( localeOrDefault(monthOrLocale.toString()).c_str() ), error ); if (gcal) tz = nullptr; // gcal owns tz now if (U_FAILURE(error)) { s_intl_error->setError(error, "intlgregcal_create_instance: error " "creating ICU GregorianCalendar from time zone " "and locale"); return; } return success(); } auto arg_ok = true; auto coerce_arg = [&](const Variant& v) { int64_t arg = v.toInt64(); if ((arg < INT32_MIN) || (arg > 
INT32_MAX)) { s_intl_error->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlgregcal_create_instance: at least one of " "the arguments has an absolute value that is " "too large"); arg_ok = false; return 0; } return (int32_t)arg; }; auto y = coerce_arg(yearOrTz); if (!arg_ok) return; auto m = coerce_arg(monthOrLocale); if (!arg_ok) return; auto d = coerce_arg(day); if (!arg_ok) return; auto error = U_ZERO_ERROR; auto finish = [&]() { if (U_FAILURE(error)) { s_intl_error->setError(error, "intlgregcal_create_instance: error " "creating ICU GregorianCalendar from date"); return; } assertx(gcal); tz = IntlTimeZone::ParseArg(uninit_null(), "intlgregcal_create_instance", s_intl_error.get()); if (!tz) { // error already set return; } gcal->adoptTimeZone(tz); tz = nullptr; // gcal owns tz now return success(); }; if (hour.isNull()) { gcal = new icu::GregorianCalendar(y, m, d, error); return finish(); } auto h = coerce_arg(hour); if (!arg_ok) return; if (minute.isNull()) { s_intl_error->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlgregcal_create_instance: no variant with " "4 arguments (excluding trailing NULLs)"); return; } auto i = coerce_arg(minute); if (!arg_ok) return; if (second.isNull()) { gcal = new icu::GregorianCalendar(y, m, d, h, i, error); return finish(); } auto s = coerce_arg(second); if (!arg_ok) return; gcal = new icu::GregorianCalendar(y, m, d, h, i, s, error); return finish(); } static bool HHVM_METHOD(IntlGregorianCalendar, isLeapYear, int64_t year) { GCAL_FETCH(data, this_, false); if ((year < INT32_MIN) || (year > INT32_MAX)) { data->setError(U_ILLEGAL_ARGUMENT_ERROR, "intlgregcal_is_leap_year: year out of bounds"); return false; } return (bool)data->gcal()->isLeapYear((int32_t)year); } static double HHVM_METHOD(IntlGregorianCalendar, getGregorianChange) { GCAL_FETCH(data, this_, 0.0); return (double)data->gcal()->getGregorianChange(); } static bool HHVM_METHOD(IntlGregorianCalendar, setGregorianChange, double change) { GCAL_FETCH(data, this_, false); UErrorCode error = U_ZERO_ERROR; data->gcal()->setGregorianChange(change, error); if (U_FAILURE(error)) { data->setError(error, "intlgregcal_set_gregorian_change: error " "calling ICU method"); return false; } return true; } ///////////////////////////////////////////////////////////////////////////// // Extension void IntlExtension::initCalendar() { HHVM_RCC_INT(IntlCalendar, FIELD_ERA, UCAL_ERA); HHVM_RCC_INT(IntlCalendar, FIELD_YEAR, UCAL_YEAR); HHVM_RCC_INT(IntlCalendar, FIELD_MONTH, UCAL_MONTH); HHVM_RCC_INT(IntlCalendar, FIELD_WEEK_OF_YEAR, UCAL_WEEK_OF_YEAR); HHVM_RCC_INT(IntlCalendar, FIELD_WEEK_OF_MONTH, UCAL_WEEK_OF_MONTH); HHVM_RCC_INT(IntlCalendar, FIELD_DATE, UCAL_DATE); HHVM_RCC_INT(IntlCalendar, FIELD_DAY_OF_YEAR, UCAL_DAY_OF_YEAR); HHVM_RCC_INT(IntlCalendar, FIELD_DAY_OF_WEEK, UCAL_DAY_OF_WEEK); HHVM_RCC_INT(IntlCalendar, FIELD_DAY_OF_WEEK_IN_MONTH, UCAL_DAY_OF_WEEK_IN_MONTH); HHVM_RCC_INT(IntlCalendar, FIELD_AM_PM, UCAL_AM_PM); HHVM_RCC_INT(IntlCalendar, FIELD_HOUR, UCAL_HOUR); HHVM_RCC_INT(IntlCalendar, FIELD_HOUR_OF_DAY, UCAL_HOUR_OF_DAY); HHVM_RCC_INT(IntlCalendar, FIELD_MINUTE, UCAL_MINUTE); HHVM_RCC_INT(IntlCalendar, FIELD_SECOND, UCAL_SECOND); HHVM_RCC_INT(IntlCalendar, FIELD_MILLISECOND, UCAL_MILLISECOND); HHVM_RCC_INT(IntlCalendar, FIELD_ZONE_OFFSET, UCAL_ZONE_OFFSET); HHVM_RCC_INT(IntlCalendar, FIELD_DST_OFFSET, UCAL_DST_OFFSET); HHVM_RCC_INT(IntlCalendar, FIELD_YEAR_WOY, UCAL_YEAR_WOY); HHVM_RCC_INT(IntlCalendar, FIELD_DOW_LOCAL, UCAL_DOW_LOCAL); HHVM_RCC_INT(IntlCalendar, FIELD_EXTENDED_YEAR, UCAL_EXTENDED_YEAR); 
HHVM_RCC_INT(IntlCalendar, FIELD_JULIAN_DAY, UCAL_JULIAN_DAY); HHVM_RCC_INT(IntlCalendar, FIELD_MILLISECONDS_IN_DAY, UCAL_MILLISECONDS_IN_DAY); HHVM_RCC_INT(IntlCalendar, FIELD_IS_LEAP_MONTH, UCAL_IS_LEAP_MONTH); HHVM_RCC_INT(IntlCalendar, FIELD_FIELD_COUNT, UCAL_FIELD_COUNT); HHVM_RCC_INT(IntlCalendar, FIELD_DAY_OF_MONTH, UCAL_DAY_OF_MONTH); HHVM_RCC_INT(IntlCalendar, DOW_SUNDAY, UCAL_SUNDAY); HHVM_RCC_INT(IntlCalendar, DOW_MONDAY, UCAL_MONDAY); HHVM_RCC_INT(IntlCalendar, DOW_TUESDAY, UCAL_TUESDAY); HHVM_RCC_INT(IntlCalendar, DOW_WEDNESDAY, UCAL_WEDNESDAY); HHVM_RCC_INT(IntlCalendar, DOW_THURSDAY, UCAL_THURSDAY); HHVM_RCC_INT(IntlCalendar, DOW_FRIDAY, UCAL_FRIDAY); HHVM_RCC_INT(IntlCalendar, DOW_SATURDAY, UCAL_SATURDAY); #if ((U_ICU_VERSION_MAJOR_NUM * 100) + U_ICU_VERSION_MINOR_NUM) >= 404 HHVM_RCC_INT(IntlCalendar, DOW_TYPE_WEEKDAY, UCAL_WEEKDAY); HHVM_RCC_INT(IntlCalendar, DOW_TYPE_WEEKEND, UCAL_WEEKEND); HHVM_RCC_INT(IntlCalendar, DOW_TYPE_WEEKEND_CEASE, UCAL_WEEKEND_CEASE); // Not a typo: Zend defines OFFSET as ONSET HHVM_RCC_INT(IntlCalendar, DOW_TYPE_WEEKEND_OFFSET, UCAL_WEEKEND_ONSET); #endif #if ((U_ICU_VERSION_MAJOR_NUM * 100) + U_ICU_VERSION_MINOR_NUM) >= 409 HHVM_RCC_INT(IntlCalendar, WALLTIME_FIRST, UCAL_WALLTIME_FIRST); HHVM_RCC_INT(IntlCalendar, WALLTIME_LAST, UCAL_WALLTIME_LAST); HHVM_RCC_INT(IntlCalendar, WALLTIME_NEXT_VALID, UCAL_WALLTIME_NEXT_VALID); #endif HHVM_ME(IntlCalendar, add); HHVM_ME(IntlCalendar, after); HHVM_ME(IntlCalendar, before); HHVM_ME(IntlCalendar, clear); HHVM_STATIC_ME(IntlCalendar, createInstance); HHVM_ME(IntlCalendar, equals); HHVM_ME(IntlCalendar, fieldDifference); HHVM_ME(IntlCalendar, get); HHVM_ME(IntlCalendar, getActualMaximum); HHVM_ME(IntlCalendar, getActualMinimum); HHVM_STATIC_ME(IntlCalendar, getAvailableLocales); HHVM_ME(IntlCalendar, getErrorCode); HHVM_ME(IntlCalendar, getErrorMessage); HHVM_ME(IntlCalendar, getFirstDayOfWeek); HHVM_ME(IntlCalendar, getGreatestMinimum); HHVM_ME(IntlCalendar, getLeastMaximum); HHVM_ME(IntlCalendar, getLocale); HHVM_ME(IntlCalendar, getMaximum); HHVM_ME(IntlCalendar, getMinimalDaysInFirstWeek); HHVM_ME(IntlCalendar, getMinimum); HHVM_STATIC_ME(IntlCalendar, getNow); HHVM_ME(IntlCalendar, getTime); HHVM_ME(IntlCalendar, getTimeZone); HHVM_ME(IntlCalendar, getType); HHVM_ME(IntlCalendar, inDaylightTime); HHVM_ME(IntlCalendar, isEquivalentTo); HHVM_ME(IntlCalendar, isLenient); HHVM_ME(IntlCalendar, isSet); HHVM_ME(IntlCalendar, roll); HHVM_ME(IntlCalendar, set); HHVM_ME(IntlCalendar, setFirstDayOfWeek); HHVM_ME(IntlCalendar, setLenient); HHVM_ME(IntlCalendar, setMinimalDaysInFirstWeek); HHVM_ME(IntlCalendar, setTime); HHVM_ME(IntlCalendar, setTimeZone); #if ((U_ICU_VERSION_MAJOR_NUM * 100) + U_ICU_VERSION_MINOR_NUM) >= 402 HHVM_STATIC_ME(IntlCalendar, getKeywordValuesForLocale); #endif #if ((U_ICU_VERSION_MAJOR_NUM * 100) + U_ICU_VERSION_MINOR_NUM) >= 404 HHVM_ME(IntlCalendar, getDayOfWeekType); HHVM_ME(IntlCalendar, getWeekendTransition); HHVM_ME(IntlCalendar, isWeekend); #endif #if ((U_ICU_VERSION_MAJOR_NUM * 100) + U_ICU_VERSION_MINOR_NUM) >= 409 HHVM_ME(IntlCalendar, getRepeatedWallTimeOption); HHVM_ME(IntlCalendar, getSkippedWallTimeOption); HHVM_ME(IntlCalendar, setRepeatedWallTimeOption); HHVM_ME(IntlCalendar, setSkippedWallTimeOption); #endif HHVM_ME(IntlGregorianCalendar, __construct); HHVM_ME(IntlGregorianCalendar, isLeapYear); HHVM_ME(IntlGregorianCalendar, getGregorianChange); HHVM_ME(IntlGregorianCalendar, setGregorianChange); Native::registerNativeDataInfo<IntlCalendar>(s_IntlCalendar.get()); 
loadSystemlib("icu_calendar"); } ///////////////////////////////////////////////////////////////////////////// }} // namespace HPHP::Intl
{ "pile_set_name": "Github" }
<vector xmlns:android="http://schemas.android.com/apk/res/android"
    android:width="24dp"
    android:height="24dp"
    android:viewportWidth="24"
    android:viewportHeight="24">
  <path
      android:pathData="M12,13C12.5523,13 13,12.5523 13,12C13,11.4477 12.5523,11 12,11C11.4477,11 11,11.4477 11,12C11,12.5523 11.4477,13 12,13Z"
      android:fillColor="#ffffff"/>
  <path
      android:pathData="M22,12C22,17.5228 17.5228,22 12,22C6.4771,22 2,17.5228 2,12C2,6.4771 6.4771,2 12,2C17.5228,2 22,6.4771 22,12ZM6,18L10,10L18,6L14,14L6,18Z"
      android:fillColor="#ffffff"
      android:fillType="evenOdd"/>
</vector>
{ "pile_set_name": "Github" }
var convert = require('./convert'),
    func = convert('methodOf', require('../methodOf'));

func.placeholder = require('./placeholder');
module.exports = func;
{ "pile_set_name": "Github" }
# O3X - trim unwanted linux support code
#SUBDIRS = bash_completion.d dracut initramfs
#DIST_SUBDIRS = bash_completion.d dracut initramfs

SUBDIRS = bash_completion.d
DIST_SUBDIRS = bash_completion.d
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.5.0_08) on Tue Jun 03 16:15:27 GMT-05:00 2008 --> <TITLE> Uses of Class soot.jimple.internal.JReturnStmt (Soot API) </TITLE> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { parent.document.title="Uses of Class soot.jimple.internal.JReturnStmt (Soot API)"; } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../soot/jimple/internal/JReturnStmt.html" title="class in soot.jimple.internal"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../index.html?soot/jimple/internal//class-useJReturnStmt.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="JReturnStmt.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> <B>Uses of Class<br>soot.jimple.internal.JReturnStmt</B></H2> </CENTER> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> Packages that use <A HREF="../../../../soot/jimple/internal/JReturnStmt.html" title="class in soot.jimple.internal">JReturnStmt</A></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><A HREF="#soot.grimp.internal"><B>soot.grimp.internal</B></A></TD> <TD>Internal, messy, 
implementation-specific classes for the Grimp intermediate representation.&nbsp;</TD> </TR> </TABLE> &nbsp; <P> <A NAME="soot.grimp.internal"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> Uses of <A HREF="../../../../soot/jimple/internal/JReturnStmt.html" title="class in soot.jimple.internal">JReturnStmt</A> in <A HREF="../../../../soot/grimp/internal/package-summary.html">soot.grimp.internal</A></FONT></TH> </TR> </TABLE> &nbsp; <P> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../soot/jimple/internal/JReturnStmt.html" title="class in soot.jimple.internal">JReturnStmt</A> in <A HREF="../../../../soot/grimp/internal/package-summary.html">soot.grimp.internal</A></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;class</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../soot/grimp/internal/GReturnStmt.html" title="class in soot.grimp.internal">GReturnStmt</A></B></CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> </TABLE> &nbsp; <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../soot/jimple/internal/JReturnStmt.html" title="class in soot.jimple.internal"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../index.html?soot/jimple/internal//class-useJReturnStmt.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="JReturnStmt.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> 
<NOSCRIPT> <A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
{ "pile_set_name": "Github" }
<!-- Copyright (c) 2018 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 --> <property name="module" value="ejb-ejb31-ejblite-javamodule"/> <property name="appname" value="${module}"/> <property name="jndiroot" value="${appname}-ejb"/> <property name="assemble" value="${build.classes.dir}/archive"/> <property name="se.client" value="com.acme.Client"/> <property name="contextroot" value="${module}"/>
{ "pile_set_name": "Github" }
Pod::Spec.new do |s|
  s.name         = "GDPerformanceView"
  s.version      = "1.3.1"
  s.summary      = "Shows FPS, CPU usage, app and iOS versions above the status bar and reports FPS and CPU usage via delegate."
  s.homepage     = "https://github.com/dani-gavrilov/GDPerformanceView"
  s.license      = { :type => "MIT", :file => "LICENSE" }
  s.author       = { "Gavrilov Daniil" => "[email protected]" }
  s.platform     = :ios, "8.0"
  s.ios.deployment_target = "8.0"
  s.source       = { :git => "https://github.com/dani-gavrilov/GDPerformanceView.git", :tag => "1.3.1" }
  s.source_files = "GDPerformanceView/GDPerformanceMonitoring/*"
  s.frameworks   = "UIKit", "Foundation", "QuartzCore"
  s.requires_arc = true
end
{ "pile_set_name": "Github" }
RUN: llvm-objdump -r %p/Inputs/elf-reloc-no-sym.x86_64 \ RUN: | FileCheck %s ; CHECK: elf-reloc-no-sym.x86_64: file format ELF64-x86-64 ; CHECK-NOT: {{.}}
{ "pile_set_name": "Github" }
/* * This file is part of the SDWebImage package. * (c) Olivier Poitrey <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ #import <Foundation/Foundation.h> #import "SDWebImageCompat.h" #import "SDWebImageOperation.h" typedef NS_OPTIONS(NSUInteger, SDWebImageDownloaderOptions) { /** * Put the download in the low queue priority and task priority. */ SDWebImageDownloaderLowPriority = 1 << 0, /** * This flag enables progressive download, the image is displayed progressively during download as a browser would do. */ SDWebImageDownloaderProgressiveDownload = 1 << 1, /** * By default, request prevent the use of NSURLCache. With this flag, NSURLCache * is used with default policies. */ SDWebImageDownloaderUseNSURLCache = 1 << 2, /** * Call completion block with nil image/imageData if the image was read from NSURLCache * (to be combined with `SDWebImageDownloaderUseNSURLCache`). */ SDWebImageDownloaderIgnoreCachedResponse = 1 << 3, /** * In iOS 4+, continue the download of the image if the app goes to background. This is achieved by asking the system for * extra time in background to let the request finish. If the background task expires the operation will be cancelled. */ SDWebImageDownloaderContinueInBackground = 1 << 4, /** * Handles cookies stored in NSHTTPCookieStore by setting * NSMutableURLRequest.HTTPShouldHandleCookies = YES; */ SDWebImageDownloaderHandleCookies = 1 << 5, /** * Enable to allow untrusted SSL certificates. * Useful for testing purposes. Use with caution in production. */ SDWebImageDownloaderAllowInvalidSSLCertificates = 1 << 6, /** * Put the download in the high queue priority and task priority. */ SDWebImageDownloaderHighPriority = 1 << 7, /** * Scale down the image */ SDWebImageDownloaderScaleDownLargeImages = 1 << 8, }; typedef NS_ENUM(NSInteger, SDWebImageDownloaderExecutionOrder) { /** * Default value. All download operations will execute in queue style (first-in-first-out). */ SDWebImageDownloaderFIFOExecutionOrder, /** * All download operations will execute in stack style (last-in-first-out). */ SDWebImageDownloaderLIFOExecutionOrder }; FOUNDATION_EXPORT NSString * _Nonnull const SDWebImageDownloadStartNotification; FOUNDATION_EXPORT NSString * _Nonnull const SDWebImageDownloadStopNotification; typedef void(^SDWebImageDownloaderProgressBlock)(NSInteger receivedSize, NSInteger expectedSize, NSURL * _Nullable targetURL); typedef void(^SDWebImageDownloaderCompletedBlock)(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, BOOL finished); typedef NSDictionary<NSString *, NSString *> SDHTTPHeadersDictionary; typedef NSMutableDictionary<NSString *, NSString *> SDHTTPHeadersMutableDictionary; typedef SDHTTPHeadersDictionary * _Nullable (^SDWebImageDownloaderHeadersFilterBlock)(NSURL * _Nullable url, SDHTTPHeadersDictionary * _Nullable headers); /** * A token associated with each download. Can be used to cancel a download */ @interface SDWebImageDownloadToken : NSObject <SDWebImageOperation> /** The download's URL. This should be readonly and you should not modify */ @property (nonatomic, strong, nullable) NSURL *url; /** The cancel token taken from `addHandlersForProgress:completed`. This should be readonly and you should not modify @note use `-[SDWebImageDownloadToken cancel]` to cancel the token */ @property (nonatomic, strong, nullable) id downloadOperationCancelToken; @end /** * Asynchronous downloader dedicated and optimized for image loading. 
*/ @interface SDWebImageDownloader : NSObject /** * Decompressing images that are downloaded and cached can improve performance but can consume lot of memory. * Defaults to YES. Set this to NO if you are experiencing a crash due to excessive memory consumption. */ @property (assign, nonatomic) BOOL shouldDecompressImages; /** * The maximum number of concurrent downloads */ @property (assign, nonatomic) NSInteger maxConcurrentDownloads; /** * Shows the current amount of downloads that still need to be downloaded */ @property (readonly, nonatomic) NSUInteger currentDownloadCount; /** * The timeout value (in seconds) for the download operation. Default: 15.0. */ @property (assign, nonatomic) NSTimeInterval downloadTimeout; /** * The configuration in use by the internal NSURLSession. * Mutating this object directly has no effect. * * @see createNewSessionWithConfiguration: */ @property (readonly, nonatomic, nonnull) NSURLSessionConfiguration *sessionConfiguration; /** * Changes download operations execution order. Default value is `SDWebImageDownloaderFIFOExecutionOrder`. */ @property (assign, nonatomic) SDWebImageDownloaderExecutionOrder executionOrder; /** * Singleton method, returns the shared instance * * @return global shared instance of downloader class */ + (nonnull instancetype)sharedDownloader; /** * Set the default URL credential to be set for request operations. */ @property (strong, nonatomic, nullable) NSURLCredential *urlCredential; /** * Set username */ @property (strong, nonatomic, nullable) NSString *username; /** * Set password */ @property (strong, nonatomic, nullable) NSString *password; /** * Set filter to pick headers for downloading image HTTP request. * * This block will be invoked for each downloading image request, returned * NSDictionary will be used as headers in corresponding HTTP request. */ @property (nonatomic, copy, nullable) SDWebImageDownloaderHeadersFilterBlock headersFilter; /** * Creates an instance of a downloader with specified session configuration. * @note `timeoutIntervalForRequest` is going to be overwritten. * @return new instance of downloader class */ - (nonnull instancetype)initWithSessionConfiguration:(nullable NSURLSessionConfiguration *)sessionConfiguration NS_DESIGNATED_INITIALIZER; /** * Set a value for a HTTP header to be appended to each download HTTP request. * * @param value The value for the header field. Use `nil` value to remove the header. * @param field The name of the header field to set. */ - (void)setValue:(nullable NSString *)value forHTTPHeaderField:(nullable NSString *)field; /** * Returns the value of the specified HTTP header field. * * @return The value associated with the header field field, or `nil` if there is no corresponding header field. */ - (nullable NSString *)valueForHTTPHeaderField:(nullable NSString *)field; /** * Sets a subclass of `SDWebImageDownloaderOperation` as the default * `NSOperation` to be used each time SDWebImage constructs a request * operation to download an image. * * @param operationClass The subclass of `SDWebImageDownloaderOperation` to set * as default. Passing `nil` will revert to `SDWebImageDownloaderOperation`. */ - (void)setOperationClass:(nullable Class)operationClass; /** * Creates a SDWebImageDownloader async downloader instance with a given URL * * The delegate will be informed when the image is finish downloaded or an error has happen. 
 *
 * @see SDWebImageDownloaderDelegate
 *
 * @param url            The URL to the image to download
 * @param options        The options to be used for this download
 * @param progressBlock  A block called repeatedly while the image is downloading
 *                       @note the progress block is executed on a background queue
 * @param completedBlock A block called once the download is completed.
 *                       If the download succeeded, the image parameter is set; in case of error,
 *                       the error parameter is set with the error. The last parameter is always YES
 *                       if SDWebImageDownloaderProgressiveDownload isn't used. With the
 *                       SDWebImageDownloaderProgressiveDownload option, this block is called
 *                       repeatedly with the partial image object and the finished argument set to NO
 *                       before being called a last time with the full image and the finished argument
 *                       set to YES. In case of error, the finished argument is always YES.
 *
 * @return A token (SDWebImageDownloadToken) that can be passed to -cancel: to cancel this operation
 */
- (nullable SDWebImageDownloadToken *)downloadImageWithURL:(nullable NSURL *)url
                                                   options:(SDWebImageDownloaderOptions)options
                                                  progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                                                 completed:(nullable SDWebImageDownloaderCompletedBlock)completedBlock;

/**
 * Cancels a download that was previously queued using -downloadImageWithURL:options:progress:completed:
 *
 * @param token The token received from -downloadImageWithURL:options:progress:completed: that should be canceled.
 */
- (void)cancel:(nullable SDWebImageDownloadToken *)token;

/**
 * Sets the download queue suspension state
 */
- (void)setSuspended:(BOOL)suspended;

/**
 * Cancels all download operations in the queue
 */
- (void)cancelAllDownloads;

/**
 * Forces SDWebImageDownloader to create and use a new NSURLSession that is
 * initialized with the given configuration.
 * @note All existing download operations in the queue will be cancelled.
 * @note `timeoutIntervalForRequest` is going to be overwritten.
 *
 * @param sessionConfiguration The configuration to use for the new NSURLSession
 */
- (void)createNewSessionWithConfiguration:(nonnull NSURLSessionConfiguration *)sessionConfiguration;

/**
 * Invalidates the managed session, optionally canceling pending operations.
 * @note If you use a custom downloader instead of the shared downloader, you need to call this method
 *       when you no longer use it, to avoid a memory leak.
 * @param cancelPendingOperations Whether or not to cancel pending operations.
 * @note Calling this method on the shared downloader has no effect.
 */
- (void)invalidateSessionAndCancel:(BOOL)cancelPendingOperations;

@end
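For readers skimming this header, here is a minimal usage sketch assembled only from the declarations above (`sharedDownloader`, `downloadImageWithURL:options:progress:completed:`, `cancel:`); the URL and the chosen option are placeholder assumptions, not something the header prescribes.

```objc
// Hedged sketch: API names come from the header above; the URL is hypothetical.
NSURL *url = [NSURL URLWithString:@"https://example.com/image.png"]; // placeholder URL

SDWebImageDownloadToken *token =
    [[SDWebImageDownloader sharedDownloader]
        downloadImageWithURL:url
                     options:SDWebImageDownloaderHighPriority
                    progress:^(NSInteger receivedSize, NSInteger expectedSize, NSURL * _Nullable targetURL) {
                        // Invoked on a background queue as bytes arrive.
                    }
                   completed:^(UIImage * _Nullable image, NSData * _Nullable data, NSError * _Nullable error, BOOL finished) {
                        if (image && finished) {
                            // Hand the image to the UI on the main queue.
                        }
                    }];

// If the image is no longer needed before completion:
[[SDWebImageDownloader sharedDownloader] cancel:token];
```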
{ "pile_set_name": "Github" }
var addSorting = (function () { "use strict"; var cols, currentSort = { index: 0, desc: false }; // returns the summary table element function getTable() { return document.querySelector('.coverage-summary table'); } // returns the thead element of the summary table function getTableHeader() { return getTable().querySelector('thead tr'); } // returns the tbody element of the summary table function getTableBody() { return getTable().querySelector('tbody'); } // returns the th element for nth column function getNthColumn(n) { return getTableHeader().querySelectorAll('th')[n]; } // loads all columns function loadColumns() { var colNodes = getTableHeader().querySelectorAll('th'), colNode, cols = [], col, i; for (i = 0; i < colNodes.length; i += 1) { colNode = colNodes[i]; col = { key: colNode.getAttribute('data-col'), sortable: !colNode.getAttribute('data-nosort'), type: colNode.getAttribute('data-type') || 'string' }; cols.push(col); if (col.sortable) { col.defaultDescSort = col.type === 'number'; colNode.innerHTML = colNode.innerHTML + '<span class="sorter"></span>'; } } return cols; } // attaches a data attribute to every tr element with an object // of data values keyed by column name function loadRowData(tableRow) { var tableCols = tableRow.querySelectorAll('td'), colNode, col, data = {}, i, val; for (i = 0; i < tableCols.length; i += 1) { colNode = tableCols[i]; col = cols[i]; val = colNode.getAttribute('data-value'); if (col.type === 'number') { val = Number(val); } data[col.key] = val; } return data; } // loads all row data function loadData() { var rows = getTableBody().querySelectorAll('tr'), i; for (i = 0; i < rows.length; i += 1) { rows[i].data = loadRowData(rows[i]); } } // sorts the table using the data for the ith column function sortByIndex(index, desc) { var key = cols[index].key, sorter = function (a, b) { a = a.data[key]; b = b.data[key]; return a < b ? -1 : a > b ? 1 : 0; }, finalSorter = sorter, tableBody = document.querySelector('.coverage-summary tbody'), rowNodes = tableBody.querySelectorAll('tr'), rows = [], i; if (desc) { finalSorter = function (a, b) { return -1 * sorter(a, b); }; } for (i = 0; i < rowNodes.length; i += 1) { rows.push(rowNodes[i]); tableBody.removeChild(rowNodes[i]); } rows.sort(finalSorter); for (i = 0; i < rows.length; i += 1) { tableBody.appendChild(rows[i]); } } // removes sort indicators for current column being sorted function removeSortIndicators() { var col = getNthColumn(currentSort.index), cls = col.className; cls = cls.replace(/ sorted$/, '').replace(/ sorted-desc$/, ''); col.className = cls; } // adds sort indicators for current column being sorted function addSortIndicators() { getNthColumn(currentSort.index).className += currentSort.desc ? 
' sorted-desc' : ' sorted'; } // adds event listeners for all sorter widgets function enableUI() { var i, el, ithSorter = function ithSorter(i) { var col = cols[i]; return function () { var desc = col.defaultDescSort; if (currentSort.index === i) { desc = !currentSort.desc; } sortByIndex(i, desc); removeSortIndicators(); currentSort.index = i; currentSort.desc = desc; addSortIndicators(); }; }; for (i =0 ; i < cols.length; i += 1) { if (cols[i].sortable) { el = getNthColumn(i).querySelector('.sorter'); if (el.addEventListener) { el.addEventListener('click', ithSorter(i)); } else { el.attachEvent('onclick', ithSorter(i)); } } } } // adds sorting functionality to the UI return function () { if (!getTable()) { return; } cols = loadColumns(); loadData(cols); addSortIndicators(); enableUI(); }; })(); window.addEventListener('load', addSorting);
{ "pile_set_name": "Github" }
/* * Copyright 2016 Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <folly/portability/SysUio.h> #include <errno.h> #include <stdio.h> #include <folly/ScopeGuard.h> #include <folly/portability/SysFile.h> #include <folly/portability/Unistd.h> template <class F, class... Args> static int wrapPositional(F f, int fd, off_t offset, Args... args) { off_t origLoc = lseek(fd, 0, SEEK_CUR); if (origLoc == off_t(-1)) { return -1; } if (lseek(fd, offset, SEEK_SET) == off_t(-1)) { return -1; } int res = (int)f(fd, args...); int curErrNo = errno; if (lseek(fd, origLoc, SEEK_SET) == off_t(-1)) { if (res == -1) { errno = curErrNo; } return -1; } errno = curErrNo; return res; } #if !FOLLY_HAVE_PREADV extern "C" ssize_t preadv(int fd, const iovec* iov, int count, off_t offset) { return wrapPositional(readv, fd, offset, iov, count); } #endif #if !FOLLY_HAVE_PWRITEV extern "C" ssize_t pwritev(int fd, const iovec* iov, int count, off_t offset) { return wrapPositional(writev, fd, offset, iov, count); } #endif #ifdef _WIN32 template <bool isRead> static ssize_t doVecOperation(int fd, const iovec* iov, int count) { if (!count) { return 0; } if (count < 0 || count > folly::kIovMax) { errno = EINVAL; return -1; } if (lockf(fd, F_LOCK, 0) == -1) { return -1; } SCOPE_EXIT { lockf(fd, F_ULOCK, 0); }; ssize_t bytesProcessed = 0; int curIov = 0; void* curBase = iov[0].iov_base; size_t curLen = iov[0].iov_len; while (curIov < count) { int res = 0; if (isRead) { res = read(fd, curBase, (unsigned int)curLen); if (res == 0 && curLen != 0) { break; // End of File } } else { res = write(fd, curBase, (unsigned int)curLen); // Write of zero bytes is fine. } if (res == -1) { return -1; } if (res == curLen) { curIov++; if (curIov < count) { curBase = iov[curIov].iov_base; curLen = iov[curIov].iov_len; } } else { curBase = (void*)((char*)curBase + res); curLen -= res; } if (bytesProcessed + res < 0) { // Overflow errno = EINVAL; return -1; } bytesProcessed += res; } return bytesProcessed; } extern "C" ssize_t readv(int fd, const iovec* iov, int count) { return doVecOperation<true>(fd, iov, count); } extern "C" ssize_t writev(int fd, const iovec* iov, int count) { return doVecOperation<false>(fd, iov, count); } #endif
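As a side note for readers unfamiliar with scatter/gather I/O, the sketch below shows the call pattern these shims emulate: a gather `writev` followed by a positional `preadv` that leaves the file cursor alone (or restores it, in the `wrapPositional` fallback above). The file path is a placeholder, and the snippet assumes a POSIX-style environment or this portability header.

```cpp
#include <fcntl.h>
#include <unistd.h>
#include <sys/uio.h>   // or folly/portability/SysUio.h to pick up the fallbacks above
#include <cstdio>

int main() {
  int fd = open("/tmp/sysuio_demo.txt", O_RDWR | O_CREAT | O_TRUNC, 0644); // placeholder path
  if (fd == -1) return 1;

  char a[] = "hello ";
  char b[] = "world\n";
  iovec iov[2] = {{a, sizeof(a) - 1}, {b, sizeof(b) - 1}};
  ssize_t written = writev(fd, iov, 2);   // gather-write both buffers in one call

  char r1[6] = {0}, r2[7] = {0};
  iovec riov[2] = {{r1, 5}, {r2, 6}};
  ssize_t got = preadv(fd, riov, 2, 0);   // scatter-read from offset 0; file cursor unchanged

  std::printf("wrote %zd, read %zd: %s%s", written, got, r1, r2);
  close(fd);
  return 0;
}
```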
{ "pile_set_name": "Github" }
using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CSharp; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Diagnostics; using System.Collections.Immutable; using System.Linq; namespace UnityEngineAnalyzer.StringMethods { [DiagnosticAnalyzer(LanguageNames.CSharp)] public sealed class DoNotUseStringMethodsAnalyzer : DiagnosticAnalyzer { private static readonly ImmutableHashSet<string> StringMethods = ImmutableHashSet.Create("SendMessage", "SendMessageUpwards", "BroadcastMessage", "Invoke", "InvokeRepeating"); private static readonly ImmutableHashSet<string> Namespaces = ImmutableHashSet.Create("UnityEngine.Component", "UnityEngine.GameObject", "UnityEngine.MonoBehaviour"); public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(DiagnosticDescriptors.DoNotUseStringMethods); public override void Initialize(AnalysisContext context) { context.RegisterSyntaxNodeAction(AnalyzeNode, SyntaxKind.InvocationExpression); } private static void AnalyzeNode(SyntaxNodeAnalysisContext context) { var invocation = context.Node as InvocationExpressionSyntax; if (invocation == null) { return; } string name = null; if (invocation.Expression is MemberAccessExpressionSyntax) { name = ((MemberAccessExpressionSyntax)invocation.Expression).Name.Identifier.ToString(); } else if (invocation.Expression is IdentifierNameSyntax) { name = ((IdentifierNameSyntax)invocation.Expression).ToString(); } else if (invocation.Expression is GenericNameSyntax) { name = ((GenericNameSyntax)invocation.Expression).Identifier.ToString(); } // check if any of the "string" methods are used if (!StringMethods.Contains(name)) { return; } // check if the method is the one from UnityEngine var symbolInfo = context.SemanticModel.GetSymbolInfo(invocation); var methodSymbol = symbolInfo.Symbol as IMethodSymbol; if (Namespaces.Any(ns => methodSymbol?.ToString().StartsWith(ns) ?? false)) { var diagnostic = Diagnostic.Create(DiagnosticDescriptors.DoNotUseStringMethods, invocation.GetLocation()); context.ReportDiagnostic(diagnostic); } } } }
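To make the rule concrete, here is a hypothetical MonoBehaviour (class and method names invented for illustration, not part of the analyzer project) containing calls this analyzer would report, because the invoked names are in `StringMethods` and resolve to symbols under the `UnityEngine.*` types listed in `Namespaces`.

```csharp
using UnityEngine;

// Illustrative only.
public class EnemySpawner : MonoBehaviour
{
    void Start()
    {
        // Reported: UnityEngine.MonoBehaviour.Invoke called by method name.
        Invoke("Spawn", 2.0f);

        // Reported: UnityEngine.GameObject.SendMessage called by method name.
        gameObject.SendMessage("OnWaveStarted");
    }

    void Spawn() { }
    void OnWaveStarted() { }
}
```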
{ "pile_set_name": "Github" }
# Lines starting with '#' and sections without content # are not displayed by a call to 'details' # [Website] http://forum.ruboard.ru/showthread.php/143139-VSO-Downloader-v.2.7.0.4-%D0%B1%D0%B5%D1%81%D0%BF%D0%BB%D0%B0%D1%82%D0%BD%D1%8B%D0%B9-%D0%B4%D0%B8%D1%81%D0%BF%D0%B5%D1%82%D1%87%D0%B5%D1%80-%D0%B7%D0%B0%D0%BA%D0%B0%D1%87%D0%B5%D0%BA [filters] http://cdn.leadsleader.ru/ll.js [other] # Any other details [comments] fanboy
{ "pile_set_name": "Github" }
package reflect2 import ( "reflect" "runtime" "strings" "sync" "unsafe" ) // typelinks1 for 1.5 ~ 1.6 //go:linkname typelinks1 reflect.typelinks func typelinks1() [][]unsafe.Pointer // typelinks2 for 1.7 ~ //go:linkname typelinks2 reflect.typelinks func typelinks2() (sections []unsafe.Pointer, offset [][]int32) // initOnce guards initialization of types and packages var initOnce sync.Once var types map[string]reflect.Type var packages map[string]map[string]reflect.Type // discoverTypes initializes types and packages func discoverTypes() { types = make(map[string]reflect.Type) packages = make(map[string]map[string]reflect.Type) ver := runtime.Version() if ver == "go1.5" || strings.HasPrefix(ver, "go1.5.") { loadGo15Types() } else if ver == "go1.6" || strings.HasPrefix(ver, "go1.6.") { loadGo15Types() } else { loadGo17Types() } } func loadGo15Types() { var obj interface{} = reflect.TypeOf(0) typePtrss := typelinks1() for _, typePtrs := range typePtrss { for _, typePtr := range typePtrs { (*emptyInterface)(unsafe.Pointer(&obj)).word = typePtr typ := obj.(reflect.Type) if typ.Kind() == reflect.Ptr && typ.Elem().Kind() == reflect.Struct { loadedType := typ.Elem() pkgTypes := packages[loadedType.PkgPath()] if pkgTypes == nil { pkgTypes = map[string]reflect.Type{} packages[loadedType.PkgPath()] = pkgTypes } types[loadedType.String()] = loadedType pkgTypes[loadedType.Name()] = loadedType } if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Ptr && typ.Elem().Elem().Kind() == reflect.Struct { loadedType := typ.Elem().Elem() pkgTypes := packages[loadedType.PkgPath()] if pkgTypes == nil { pkgTypes = map[string]reflect.Type{} packages[loadedType.PkgPath()] = pkgTypes } types[loadedType.String()] = loadedType pkgTypes[loadedType.Name()] = loadedType } } } } func loadGo17Types() { var obj interface{} = reflect.TypeOf(0) sections, offset := typelinks2() for i, offs := range offset { rodata := sections[i] for _, off := range offs { (*emptyInterface)(unsafe.Pointer(&obj)).word = resolveTypeOff(unsafe.Pointer(rodata), off) typ := obj.(reflect.Type) if typ.Kind() == reflect.Ptr && typ.Elem().Kind() == reflect.Struct { loadedType := typ.Elem() pkgTypes := packages[loadedType.PkgPath()] if pkgTypes == nil { pkgTypes = map[string]reflect.Type{} packages[loadedType.PkgPath()] = pkgTypes } types[loadedType.String()] = loadedType pkgTypes[loadedType.Name()] = loadedType } } } } type emptyInterface struct { typ unsafe.Pointer word unsafe.Pointer } // TypeByName return the type by its name, just like Class.forName in java func TypeByName(typeName string) Type { initOnce.Do(discoverTypes) return Type2(types[typeName]) } // TypeByPackageName return the type by its package and name func TypeByPackageName(pkgPath string, name string) Type { initOnce.Do(discoverTypes) pkgTypes := packages[pkgPath] if pkgTypes == nil { return nil } return Type2(pkgTypes[name]) }
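A hedged usage sketch of the two lookup helpers defined above. The `User` struct, the `main` package path, and the `github.com/modern-go/reflect2` import path are assumptions made for this example, and a type is only discoverable if the compiled binary actually retains a `*T` entry in the runtime typelinks that `discoverTypes` walks.

```go
package main

import (
	"fmt"

	"github.com/modern-go/reflect2" // import path assumed for this sketch
)

// Example type; a *User reference must survive in the binary for its
// type data to show up in the typelinks scanned above.
type User struct {
	Name string
}

var keepAlive = &User{}

func main() {
	// Lookup by fully qualified name, "just like Class.forName in java".
	if t := reflect2.TypeByName("main.User"); t != nil {
		fmt.Println("found:", t)
	}

	// Lookup by package path plus bare type name.
	if t := reflect2.TypeByPackageName("main", "User"); t != nil {
		fmt.Println("found:", t)
	}
}
```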
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard. // #import "WebviewJSEventHandlerBase.h" #import "JSApiSelectContactsViewControllerDelegate-Protocol.h" #import "JSApiSelectSessionViewControllerDelegate-Protocol.h" #import "PBMessageObserverDelegate-Protocol.h" @class CContact, JSApiSelectContactsViewController, JSApiSelectSessionViewController, JSEvent, NSString; @interface WebviewJSEventHandler_selectSingleContact : WebviewJSEventHandlerBase <JSApiSelectContactsViewControllerDelegate, JSApiSelectSessionViewControllerDelegate, PBMessageObserverDelegate> { int m_enReturnValueSignedType; JSApiSelectSessionViewController *m_oSelectSessionViewController; JSApiSelectContactsViewController *m_oSelectContactViewController; JSEvent *m_oJSEvent; CContact *m_oSelectedContact; unsigned int m_uiSelectMode; NSString *m_nsBusiId; NSString *m_nsAppid; } - (void).cxx_destruct; - (void)getBizUserOpenIdRequest; - (void)getIBGUserOpenIdRequest; - (void)handleBizResponseCGIWrap:(id)arg1; - (void)handlePayResponeCGIWrap:(id)arg1; - (void)MessageReturn:(id)arg1 Event:(unsigned int)arg2; - (void)OnJSApiSelectSessionViewControllerNewSession; - (void)OnJSApiSelectSessionViewControllerSelectContactReturn:(id)arg1 atScene:(unsigned int)arg2; - (_Bool)OnJSApiSelectSessionViewControllerFilterContactCandidate:(id)arg1; - (void)OnJSApiSelectSessionViewControllerBack; - (void)OnJSApiSelectContactsViewControllerSelectContactReturn:(id)arg1 atScene:(unsigned int)arg2; - (_Bool)OnJSApiSelectContactsViewControllerFilterContactCandidate:(id)arg1; - (void)OnJSApiSelectContactsViewControllerBack; - (void)dealloc; - (void)confirmEndJSEvent; - (void)cancelEndJSEnvent; - (void)handleJSEvent:(id)arg1 HandlerFacade:(id)arg2 ExtraData:(id)arg3 CurrentViewController:(id)arg4; - (void)handleJSEvent:(id)arg1 HandlerFacade:(id)arg2 ExtraData:(id)arg3; @end
{ "pile_set_name": "Github" }
#ifndef _DELAUNAY_UTILS_H #define _DELAUNAY_UTILS_H #include <vector> #include <queue> #include <functional> using namespace std; #define ONRIGHT(x0, y0, x1, y1, x, y) ((y0-y)*(x1-x) > (x0-x)*(y1-y)) #define EDGE0(node) ((node + 1) % 3) #define EDGE1(node) ((node + 2) % 3) #define INDEX2(arr,ix,jx) (arr[2*ix+jx]) #define INDEX3(arr,ix,jx) (arr[3*ix+jx]) #define INDEXN(arr,N,ix,jx) (arr[N*ix+jx]) #define SQ(a) ((a)*(a)) #define TOLERANCE_EPS (4e-13) #define PERTURB_EPS (1e-3) #define GINORMOUS (1e100) extern int walking_triangles(int start, double targetx, double targety, double *x, double *y, int *nodes, int *neighbors); extern void getminmax(double *arr, int n, double& minimum, double& maximum); extern bool circumcenter(double x0, double y0, double x1, double y1, double x2, double y2, double& centerx, double& centery); extern double signed_area(double x0, double y0, double x1, double y1, double x2, double y2); class SeededPoint { public: SeededPoint() {}; SeededPoint(double x0c, double y0c, double xc, double yc) { this->x0 = x0c; this->y0 = y0c; this->x = xc; this->y = yc; }; ~SeededPoint() {}; double x0, y0; double x, y; bool operator<(const SeededPoint& p2) const { double test = (this->y0-p2.y)*(this->x-p2.x) - (this->x0-p2.x)*(this->y-p2.y); if (test == 0) { double length1 = SQ(this->x-this->x0) + SQ(this->y-this->y0); double length2 = SQ(p2.x-this->x0) + SQ(p2.y-this->y0); return (length2 > length1); } else return (test < 0); } }; class ConvexPolygon { public: ConvexPolygon(); ~ConvexPolygon(); void seed(double x0c, double y0c); void push(double x, double y); double area(); // private: // I don't care much for data-hiding double x0, y0; vector<SeededPoint> points; bool seeded; }; #endif // _DELAUNAY_UTILS_H
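A small, assumed usage sketch of the free `circumcenter` helper declared above (it needs to be linked against the corresponding implementation file, which is not shown here): for the right triangle (0,0), (1,0), (0,1) the circumcenter is (0.5, 0.5), equidistant from all three vertices.

```cpp
#include <cstdio>
#include "delaunay_utils.h"  // the header above; include path assumed

int main() {
    double cx = 0.0, cy = 0.0;
    // The bool presumably flags degenerate (collinear) input; the exact
    // convention lives in the implementation file, which is not shown.
    bool ok = circumcenter(0.0, 0.0, 1.0, 0.0, 0.0, 1.0, cx, cy);
    std::printf("ok=%d center=(%g, %g)\n", ok ? 1 : 0, cx, cy);
    return 0;
}
```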
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 7242e48b40555bb4c83749fe1c745d55 AssemblyDefinitionImporter: externalObjects: {} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.hc.core5.http.impl.io; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.net.Socket; import java.nio.charset.StandardCharsets; import org.apache.hc.core5.http.ClassicHttpRequest; import org.apache.hc.core5.http.ClassicHttpResponse; import org.apache.hc.core5.http.ContentType; import org.apache.hc.core5.http.HttpEntity; import org.apache.hc.core5.http.Method; import org.apache.hc.core5.http.NotImplementedException; import org.apache.hc.core5.http.ProtocolException; import org.apache.hc.core5.http.config.Http1Config; import org.apache.hc.core5.http.impl.DefaultContentLengthStrategy; import org.apache.hc.core5.http.io.entity.StringEntity; import org.apache.hc.core5.http.message.BasicClassicHttpResponse; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; public class TestDefaultBHttpServerConnection { @Mock private Socket socket; private DefaultBHttpServerConnection conn; @Before public void setUp() throws Exception { MockitoAnnotations.initMocks(this); conn = new DefaultBHttpServerConnection("http", Http1Config.DEFAULT, null, null, DefaultContentLengthStrategy.INSTANCE, DefaultContentLengthStrategy.INSTANCE, DefaultHttpRequestParserFactory.INSTANCE, DefaultHttpResponseWriterFactory.INSTANCE); } @Test public void testBasics() throws Exception { Assert.assertFalse(conn.isOpen()); Assert.assertEquals("[Not bound]", conn.toString()); } @Test public void testReadRequestHead() throws Exception { final String s = "GET / HTTP/1.1\r\nUser-Agent: test\r\n\r\n"; final ByteArrayInputStream inStream = new ByteArrayInputStream(s.getBytes(StandardCharsets.US_ASCII)); Mockito.when(socket.getInputStream()).thenReturn(inStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getRequestCount()); final ClassicHttpRequest request = conn.receiveRequestHeader(); Assert.assertNotNull(request); Assert.assertEquals("/", request.getPath()); Assert.assertEquals(Method.GET.name(), request.getMethod()); Assert.assertTrue(request.containsHeader("User-Agent")); Assert.assertEquals(1, conn.getEndpointDetails().getRequestCount()); } @Test public void testReadRequestEntityWithContentLength() throws Exception { final String s = "POST / HTTP/1.1\r\nUser-Agent: test\r\nContent-Length: 
3\r\n\r\n123"; final ByteArrayInputStream inStream = new ByteArrayInputStream(s.getBytes(StandardCharsets.US_ASCII)); Mockito.when(socket.getInputStream()).thenReturn(inStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getRequestCount()); final ClassicHttpRequest request = conn.receiveRequestHeader(); Assert.assertNotNull(request); Assert.assertEquals("/", request.getPath()); Assert.assertEquals(Method.POST.name(), request.getMethod()); Assert.assertTrue(request.containsHeader("User-Agent")); Assert.assertNull(request.getEntity()); Assert.assertEquals(1, conn.getEndpointDetails().getRequestCount()); conn.receiveRequestEntity(request); final HttpEntity entity = request.getEntity(); Assert.assertNotNull(entity); Assert.assertEquals(3, entity.getContentLength()); Assert.assertEquals(1, conn.getEndpointDetails().getRequestCount()); final InputStream content = entity.getContent(); Assert.assertNotNull(content); Assert.assertTrue(content instanceof ContentLengthInputStream); } @Test public void testReadRequestEntityChunckCoded() throws Exception { final String s = "POST /stuff HTTP/1.1\r\nUser-Agent: test\r\nTransfer-Encoding: " + "chunked\r\n\r\n3\r\n123\r\n0\r\n\r\n"; final ByteArrayInputStream inStream = new ByteArrayInputStream(s.getBytes(StandardCharsets.US_ASCII)); Mockito.when(socket.getInputStream()).thenReturn(inStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getRequestCount()); final ClassicHttpRequest request = conn.receiveRequestHeader(); Assert.assertNotNull(request); Assert.assertEquals("/stuff", request.getPath()); Assert.assertEquals(Method.POST.name(), request.getMethod()); Assert.assertTrue(request.containsHeader("User-Agent")); Assert.assertNull(request.getEntity()); Assert.assertEquals(1, conn.getEndpointDetails().getRequestCount()); conn.receiveRequestEntity(request); final HttpEntity entity = request.getEntity(); Assert.assertNotNull(entity); Assert.assertEquals(-1, entity.getContentLength()); Assert.assertEquals(true, entity.isChunked()); Assert.assertEquals(1, conn.getEndpointDetails().getRequestCount()); final InputStream content = entity.getContent(); Assert.assertNotNull(content); Assert.assertTrue(content instanceof ChunkedInputStream); } @Test(expected = ProtocolException.class) public void testReadRequestEntityIdentity() throws Exception { final String s = "POST /stuff HTTP/1.1\r\nUser-Agent: test\r\nTransfer-Encoding: " + "identity\r\n\r\n123"; final ByteArrayInputStream inStream = new ByteArrayInputStream(s.getBytes(StandardCharsets.US_ASCII)); Mockito.when(socket.getInputStream()).thenReturn(inStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getRequestCount()); final ClassicHttpRequest request = conn.receiveRequestHeader(); Assert.assertNotNull(request); Assert.assertEquals("/stuff", request.getPath()); Assert.assertEquals(Method.POST.name(), request.getMethod()); Assert.assertTrue(request.containsHeader("User-Agent")); Assert.assertNull(request.getEntity()); Assert.assertEquals(1, conn.getEndpointDetails().getRequestCount()); conn.receiveRequestEntity(request); } @Test public void testReadRequestNoEntity() throws Exception { final String s = "POST /stuff HTTP/1.1\r\nUser-Agent: test\r\n\r\n"; final ByteArrayInputStream inStream = new ByteArrayInputStream(s.getBytes(StandardCharsets.US_ASCII)); Mockito.when(socket.getInputStream()).thenReturn(inStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getRequestCount()); final ClassicHttpRequest request = 
conn.receiveRequestHeader(); Assert.assertNotNull(request); Assert.assertEquals("/stuff", request.getPath()); Assert.assertEquals(Method.POST.name(), request.getMethod()); Assert.assertTrue(request.containsHeader("User-Agent")); Assert.assertNull(request.getEntity()); Assert.assertEquals(1, conn.getEndpointDetails().getRequestCount()); conn.receiveRequestEntity(request); final HttpEntity entity = request.getEntity(); Assert.assertNull(entity); } @Test public void testWriteResponseHead() throws Exception { final ByteArrayOutputStream outStream = new ByteArrayOutputStream(); Mockito.when(socket.getOutputStream()).thenReturn(outStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getResponseCount()); final ClassicHttpResponse response = new BasicClassicHttpResponse(200, "OK"); response.addHeader("User-Agent", "test"); conn.sendResponseHeader(response); conn.flush(); Assert.assertEquals(1, conn.getEndpointDetails().getResponseCount()); final String s = new String(outStream.toByteArray(), "ASCII"); Assert.assertEquals("HTTP/1.1 200 OK\r\nUser-Agent: test\r\n\r\n", s); } @Test public void testWriteResponse100Head() throws Exception { final ByteArrayOutputStream outStream = new ByteArrayOutputStream(); Mockito.when(socket.getOutputStream()).thenReturn(outStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getResponseCount()); final ClassicHttpResponse response = new BasicClassicHttpResponse(100, "Go on"); conn.sendResponseHeader(response); conn.flush(); Assert.assertEquals(0, conn.getEndpointDetails().getResponseCount()); final String s = new String(outStream.toByteArray(), "ASCII"); Assert.assertEquals("HTTP/1.1 100 Go on\r\n\r\n", s); } @Test public void testWriteResponseEntityWithContentLength() throws Exception { final ByteArrayOutputStream outStream = new ByteArrayOutputStream(); Mockito.when(socket.getOutputStream()).thenReturn(outStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getResponseCount()); final ClassicHttpResponse response = new BasicClassicHttpResponse(200, "OK"); response.addHeader("Server", "test"); response.addHeader("Content-Length", "3"); response.setEntity(new StringEntity("123", ContentType.TEXT_PLAIN)); conn.sendResponseHeader(response); conn.sendResponseEntity(response); conn.flush(); Assert.assertEquals(1, conn.getEndpointDetails().getResponseCount()); final String s = new String(outStream.toByteArray(), "ASCII"); Assert.assertEquals("HTTP/1.1 200 OK\r\nServer: test\r\nContent-Length: 3\r\n\r\n123", s); } @Test public void testWriteResponseEntityChunkCoded() throws Exception { final ByteArrayOutputStream outStream = new ByteArrayOutputStream(); Mockito.when(socket.getOutputStream()).thenReturn(outStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getResponseCount()); final ClassicHttpResponse response = new BasicClassicHttpResponse(200, "OK"); response.addHeader("Server", "test"); response.addHeader("Transfer-Encoding", "chunked"); response.setEntity(new StringEntity("123", ContentType.TEXT_PLAIN)); conn.sendResponseHeader(response); conn.sendResponseEntity(response); conn.flush(); Assert.assertEquals(1, conn.getEndpointDetails().getResponseCount()); final String s = new String(outStream.toByteArray(), "ASCII"); Assert.assertEquals("HTTP/1.1 200 OK\r\nServer: test\r\nTransfer-Encoding: " + "chunked\r\n\r\n3\r\n123\r\n0\r\n\r\n", s); } @Test(expected = NotImplementedException.class) public void testWriteResponseEntityIdentity() throws Exception { final 
ByteArrayOutputStream outStream = new ByteArrayOutputStream(); Mockito.when(socket.getOutputStream()).thenReturn(outStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getResponseCount()); final ClassicHttpResponse response = new BasicClassicHttpResponse(200, "OK"); response.addHeader("Server", "test"); response.addHeader("Transfer-Encoding", "identity"); response.setEntity(new StringEntity("123", ContentType.TEXT_PLAIN)); conn.sendResponseHeader(response); conn.sendResponseEntity(response); conn.flush(); } @Test public void testWriteResponseNoEntity() throws Exception { final ByteArrayOutputStream outStream = new ByteArrayOutputStream(); Mockito.when(socket.getOutputStream()).thenReturn(outStream); conn.bind(socket); Assert.assertEquals(0, conn.getEndpointDetails().getResponseCount()); final ClassicHttpResponse response = new BasicClassicHttpResponse(200, "OK"); response.addHeader("Server", "test"); conn.sendResponseHeader(response); conn.sendResponseEntity(response); conn.flush(); Assert.assertEquals(1, conn.getEndpointDetails().getResponseCount()); final String s = new String(outStream.toByteArray(), "ASCII"); Assert.assertEquals("HTTP/1.1 200 OK\r\nServer: test\r\n\r\n", s); } }
{ "pile_set_name": "Github" }
import bpy bpy.context.scene.render.fps = 30 bpy.context.scene.render.fps_base = 1
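Worth noting: Blender treats the effective frame rate as `fps / fps_base`, so the two lines above yield exactly 30 fps. Fractional rates are expressed through the base; the values below are an example (assumed for illustration) of an NTSC-style ~29.97 fps setup.

```python
import bpy

scene = bpy.context.scene
scene.render.fps = 30
scene.render.fps_base = 1.001  # effective rate = 30 / 1.001 ≈ 29.97 fps

print(scene.render.fps / scene.render.fps_base)
```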
{ "pile_set_name": "Github" }
// Copyright 2012-present Oliver Eilhard. All rights reserved. // Use of this source code is governed by a MIT-license. // See http://olivere.mit-license.org/license.txt for details. package elastic import ( "encoding/json" "testing" ) func TestInnerHitEmpty(t *testing.T) { hit := NewInnerHit() src, err := hit.Source() if err != nil { t.Fatal(err) } data, err := json.Marshal(src) if err != nil { t.Fatalf("marshaling to JSON failed: %v", err) } got := string(data) expected := `{}` if got != expected { t.Errorf("expected\n%s\n,got:\n%s", expected, got) } } func TestInnerHitWithName(t *testing.T) { hit := NewInnerHit().Name("comments") src, err := hit.Source() if err != nil { t.Fatal(err) } data, err := json.Marshal(src) if err != nil { t.Fatalf("marshaling to JSON failed: %v", err) } got := string(data) expected := `{"name":"comments"}` if got != expected { t.Errorf("expected\n%s\n,got:\n%s", expected, got) } }
{ "pile_set_name": "Github" }
<?php /* Copyright (c) 1998-2009 ILIAS open source, Extended GPL, see docs/LICENSE */ /** * Saves (mostly asynchronously) user properties of accordions * * @author Alex Killing <[email protected]> * @version $Id$ * @ingroup ServicesAccordion * @ilCtrl_Calls ilAccordionPropertiesStorage: */ class ilAccordionPropertiesStorage { /** * @var ilObjUser */ protected $user; /** * @var ilCtrl */ protected $ctrl; /** * @var ilDB */ protected $db; /** * Constructor */ public function __construct() { global $DIC; $this->user = $DIC->user(); $this->ctrl = $DIC->ctrl(); $this->db = $DIC->database(); } public $properties = array( "opened" => array("storage" => "session") ); /** * execute command */ public function &executeCommand() { $ilUser = $this->user; $ilCtrl = $this->ctrl; $cmd = $ilCtrl->getCmd(); // $next_class = $this->ctrl->getNextClass($this); $this->$cmd(); } /** * Show Filter */ public function setOpenedTab() { $ilUser = $this->user; if ($_GET["user_id"] == $ilUser->getId()) { switch ($_GET["act"]) { case "add": $cur = $this->getProperty( $_GET["accordion_id"], (int) $_GET["user_id"], "opened" ); $cur_arr = explode(";", $cur); if (!in_array((int) $_GET["tab_nr"], $cur_arr)) { $cur_arr[] = (int) $_GET["tab_nr"]; } $this->storeProperty( $_GET["accordion_id"], (int) $_GET["user_id"], "opened", implode($cur_arr, ";") ); break; case "rem": $cur = $this->getProperty( $_GET["accordion_id"], (int) $_GET["user_id"], "opened" ); $cur_arr = explode(";", $cur); if (($key = array_search((int) $_GET["tab_nr"], $cur_arr)) !== false) { unset($cur_arr[$key]); } $this->storeProperty( $_GET["accordion_id"], (int) $_GET["user_id"], "opened", implode($cur_arr, ";") ); break; case "clear": $this->storeProperty( $_GET["accordion_id"], (int) $_GET["user_id"], "opened", "" ); break; case "set": default: $this->storeProperty( $_GET["accordion_id"], (int) $_GET["user_id"], "opened", $_GET["tab_nr"] ); break; } } } /** * Store property in session or db */ public function storeProperty( $a_table_id, $a_user_id, $a_property, $a_value ) { $ilDB = $this->db; switch ($this->properties[$a_property]["storage"]) { case "session": $_SESSION["accordion"][$a_table_id][$a_user_id][$a_property] = $a_value; break; case "db": /* $ilDB->replace("table_properties", array( "table_id" => array("text", $a_table_id), "user_id" => array("integer", $a_user_id), "property" => array("text", $a_property)), array( "value" => array("text", $a_value) )); */ } } /** * Get property in session or db */ public function getProperty($a_table_id, $a_user_id, $a_property) { $ilDB = $this->db; switch ($this->properties[$a_property]["storage"]) { case "session": $r = $_SESSION["accordion"][$a_table_id][$a_user_id][$a_property]; //echo "<br><br><br><br><br><br><br><br>get-".$r; return $r; break; case "db": /* $set = $ilDB->query("SELECT value FROM table_properties ". " WHERE table_id = ".$ilDB->quote($a_table_id, "text"). " AND user_id = ".$ilDB->quote($a_user_id, "integer"). " AND property = ".$ilDB->quote($a_property, "text") ); $rec = $ilDB->fetchAssoc($set); return $rec["value"]; break; */ } } }
{ "pile_set_name": "Github" }
15 2 1 2 3 1 2 3 1 2 3 1 2 3 1 2 3
{ "pile_set_name": "Github" }
/* * Copyright (C) 2013 Christoph Leitner <[email protected]> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "ruc_connection.h" #include "stubborn_sender.h" #include "uc_connection.h" #define DEBUG 1 #define dout DEBUG && std::cout using namespace gr::ieee802_15_4; ruc_connection::ruc_connection(rime_stack* block, uint16_t channel, pmt::pmt_t inport, pmt::pmt_t outport, const uint8_t rime_add_mine[2]) : rime_connection(block, channel, inport, outport, rime_add_mine), d_stubborn_sender(block, this, d_mac_outport), d_send_seqno(0), d_recv_seqno(0) { d_stubborn_sender.start(); } std::array<uint8_t, 256> ruc_connection::make_msgbuf(uint16_t channel, bool ack, int seqno, const uint8_t src[2], const uint8_t dest[2]) { std::array<uint8_t,256> buf; buf[0] = channel & 0xff; buf[1] = (channel >> 8) & 0xff; if(ack){ buf[2] = (1 << 7) & 0xff; //ack packet } else { buf[2] = 0; //data packet } buf[2] |= seqno << (7 - seqno_bits); //set seq.no. buf[2] |= dest[0] >> (1 + seqno_bits); buf[3] = (dest[0] << (7 - seqno_bits)) & 0xff; buf[3] |= dest[1] >> (1 + seqno_bits); buf[4] = (dest[1] << (7 - seqno_bits)) & 0xff; buf[4] |= src[0] >> (1 + seqno_bits); buf[5] = (src[0] << (7 - seqno_bits)) & 0xff; buf[5] |= src[1] >> (1 + seqno_bits); buf[6] = (src[1] << (7 - seqno_bits)) & 0xff; return buf; } void ruc_connection::pack(pmt::pmt_t msg) { assert(d_send_seqno < (1 << seqno_bits)); if(pmt::is_eof_object(msg)){ d_block->message_port_pub(d_mac_outport, pmt::PMT_EOF); d_block->detail().get()->set_done(true); return; } std::string tmp = rime_connection::msg_to_string(msg); uint8_t dest[2]; if(!uc_connection::rime_add_from_string(tmp, dest)){ std::cerr << "Warning: invalid target RIME-Address for runicast on channel "; std::cerr << static_cast<unsigned>(d_channel); std::cerr << ". Message will not be sent." 
<< std::endl; return; } std::array<uint8_t, 256> buf = ruc_connection::make_msgbuf(d_channel, false, d_send_seqno, d_rime_add_mine, dest); size_t data_len = tmp.length(); assert(data_len); assert(data_len < 256 - header_length); std::memcpy(buf.data() + header_length, tmp.data(), data_len); pmt::pmt_t rime_msg = pmt::make_blob(buf.data(), data_len + header_length); pmt::pmt_t dict = pmt::make_dict(); dict = pmt::dict_add(dict, pmt::mp("seqno"), pmt::from_long(d_send_seqno)); d_stubborn_sender.enqueue(pmt::cons(dict, rime_msg)); d_send_seqno = (d_send_seqno + 1)%(1 << seqno_bits); } void ruc_connection::unpack(pmt::pmt_t msg) { uint8_t buf[256]; uint8_t target_rime_zero, target_rime_one; uint8_t sender_rime_zero, sender_rime_one; bool is_ack = false; uint8_t packet_seqno; size_t data_len = pmt::blob_length(msg); std::memcpy(buf, pmt::blob_data(msg), data_len); target_rime_zero = buf[2] << (1 + seqno_bits); target_rime_zero |= buf[3] >> (7 - seqno_bits); target_rime_one = buf[3] << (1 + seqno_bits); target_rime_one |= buf[4] >> (7 - seqno_bits); sender_rime_zero = buf[4] << (1 + seqno_bits); sender_rime_zero |= buf[5] >> (7 - seqno_bits); sender_rime_one = buf[5] << (1 + seqno_bits); sender_rime_one |= buf[6] >> (7 - seqno_bits); dout << "[" << static_cast<int>(d_rime_add_mine[0]) << "."; dout << static_cast<int>(d_rime_add_mine[1]) << "]: "; //this block is not the destination of the message if(target_rime_zero != d_rime_add_mine[0] || target_rime_one != d_rime_add_mine[1]){ dout << "received packet with wrong receiver, discarding"; dout << "(" << static_cast<int>(target_rime_zero) << "."; dout << static_cast<int>(target_rime_one) << ")" << std::endl; return; } if((buf[2] & 0x80) > 0){ is_ack = true; buf[2] &= 0x7f; //reset ack-flag } packet_seqno = buf[2] >> (7 - seqno_bits); if(is_ack){ if(packet_seqno != recv_seqno()){ //ignore duplicate packets dout << "received duplicate ack "; dout << static_cast<int>(packet_seqno); dout << " (should be: "; dout << recv_seqno() << ")"<< std::endl; return; } dout << "received ack for seqno "; dout << static_cast<int>(packet_seqno) << std::endl; d_stubborn_sender.stop(); inc_recv_seqno(); dout << "expected next seqno: " << d_recv_seqno << std::endl; } else { //output message pmt::pmt_t rime_payload = pmt::make_blob(buf + header_length, data_len - header_length); d_block->message_port_pub(d_outport, pmt::cons(pmt::PMT_NIL, rime_payload)); //send ack uint8_t dest[] = {sender_rime_zero, sender_rime_one}; std::array<uint8_t, 256> buf = make_msgbuf(d_channel, true, packet_seqno, d_rime_add_mine, dest); dout << "sent ack message for seqno " << static_cast<int>(packet_seqno); dout << " to "; dout << static_cast<int>(dest[0]) << "."; dout << static_cast<int>(dest[1]) << std::endl; pmt::pmt_t ack_msg = pmt::make_blob(buf.data(), header_length); d_block->message_port_pub(d_mac_outport, pmt::cons(pmt::PMT_NIL, ack_msg)); } } void ruc_connection::inc_recv_seqno() { gr::thread::scoped_lock lock(d_mutex); d_recv_seqno = (d_recv_seqno + 1)%(1 << seqno_bits); } int ruc_connection::recv_seqno() { gr::thread::scoped_lock lock(d_mutex); return d_recv_seqno; }
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <script src="/benchmark/js/jquery.min.js"></script> <script type="text/javascript" src="/benchmark/js/js.cookie.js"></script> <title>BenchmarkTest02202</title> </head> <body> <form action="/benchmark/pathtraver-02/BenchmarkTest02202" method="POST" id="FormBenchmarkTest02202"> <p>Please make your car selection, and edit the value to be sent [value]:</p> <select name="BenchmarkTest02202" multiple> <option value="FileName" selected>Audi. [FileName]</option> <option value="Ford">Ford. [Ford]</option> <option value="Opel">Opel. [Opel]</option> <option value="Subaru">Subaru. [Subaru]</option> </select><br/><br/> <input type="submit" value="Submit"> </form> </body> </html>
{ "pile_set_name": "Github" }
package coursier.publish /** * All things signing. */ package object signing
{ "pile_set_name": "Github" }
// 2001-05-21 Benjamin Kosnik <[email protected]> // Copyright (C) 2001-2013 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // 27.8.1.4 Overridden virtual functions #include <fstream> #include <testsuite_hooks.h> const char name_05[] = "filebuf_virtuals-5.txt"; // empty file, need to create bool over_called; class Derived_filebuf : public std::filebuf { public: int_type overflow(int_type c) { over_called = true; return std::filebuf::overflow(c); } const char_type* pub_epptr() const { return epptr(); } const char_type* pub_pptr() const { return pptr(); } }; // libstdc++/9701 (partial) void test11() { bool test __attribute__((unused)) = true; bool over_expected; // sputc Derived_filebuf dfbuf_01; dfbuf_01.open(name_05, std::ios_base::out); over_called = false; dfbuf_01.sputc('i'); VERIFY( over_called ); over_expected = dfbuf_01.pub_epptr() == dfbuf_01.pub_pptr(); over_called = false; dfbuf_01.sputc('v'); VERIFY( (!over_expected && !over_called) || (over_expected && over_called) ); dfbuf_01.close(); } int main() { test11(); return 0; }
{ "pile_set_name": "Github" }
---
title: Vue.js 0.10.6, and what's next
date: 2014-07-29 00:04:55
---

## 0.10.6

Vue.js 0.10.6 has been released! This is another small bug-fix release and will be the last maintenance version before the next major release.

<!-- more -->

- fix `v-style` error when value is falsy or a number. (thanks to [@dmfilipenko](https://github.com/dmfilipenko))
- fix the built-in `currency` filter error when value is a string (thanks to [@dmfilipenko](https://github.com/dmfilipenko))
- fix `Vue.require` for building with Component v1.0+ (thanks to [@kewah](https://github.com/kewah))
- Allow template nodes to be passed as a template option (thanks to [@jordangarcia](https://github.com/jordangarcia))
- `vm.$destroy()` now accepts an optional argument `noRemove`. When passed in as `true` it will leave the vm's DOM node intact after the vm is destroyed (see the short sketch below).

## Vue-next

Some of you might have noticed there is a [next](https://github.com/yyx990803/vue/tree/next) branch in the repo. And yes, I am re-writing Vue.js from scratch. There are two main reasons:

- Fix some issues that are caused by design flaws in the current version. Because these changes affect the design of some core modules, it is actually easier to rewrite than to patch the current codebase.
- Improve general code quality (in particular, `compiler.js` as of now is a big pile of mess, and comments are not consistent across the codebase.)

Take note that the `next` branch is still at a **very** early stage. The internals will change a lot, and when it comes out it **will** break current applications. Despite that, I will try to keep the API changes to a minimum. Major differences with the current 0.10 branch are documented in [`changes.md`](https://github.com/yyx990803/vue/blob/next/changes.md). The list is obviously incomplete and subject to change, and some of the entries are simply ideas, but it at least gives you a taste of what to expect, and I'd appreciate your feedback on any of the topics. Share your thoughts at [vuejs/Discussion](https://github.com/vuejs/Discussion/issues).
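A tiny sketch of the new `noRemove` argument mentioned in the changelog above; the element selector and data are made up for illustration.

```js
var vm = new Vue({
  el: '#example',            // hypothetical element
  data: { msg: 'hello' }
})

// New in 0.10.6: tear the instance down but keep its DOM node in place.
vm.$destroy(true)
```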
{ "pile_set_name": "Github" }
<html><head> <link rel="stylesheet" href="style.css" type="text/css"> <meta content="text/html; charset=iso-8859-1" http-equiv="Content-Type"> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="Start" href="index.html"> <link title="Index of types" rel=Appendix href="index_types.html"> <link title="Index of exceptions" rel=Appendix href="index_exceptions.html"> <link title="Index of values" rel=Appendix href="index_values.html"> <link title="Index of modules" rel=Appendix href="index_modules.html"> <link title="Index of module types" rel=Appendix href="index_module_types.html"> <link title="Arg" rel="Chapter" href="Arg.html"> <link title="Arg_helper" rel="Chapter" href="Arg_helper.html"> <link title="Array" rel="Chapter" href="Array.html"> <link title="ArrayLabels" rel="Chapter" href="ArrayLabels.html"> <link title="Ast_helper" rel="Chapter" href="Ast_helper.html"> <link title="Ast_invariants" rel="Chapter" href="Ast_invariants.html"> <link title="Ast_iterator" rel="Chapter" href="Ast_iterator.html"> <link title="Ast_mapper" rel="Chapter" href="Ast_mapper.html"> <link title="Asttypes" rel="Chapter" href="Asttypes.html"> <link title="Attr_helper" rel="Chapter" href="Attr_helper.html"> <link title="Bigarray" rel="Chapter" href="Bigarray.html"> <link title="Buffer" rel="Chapter" href="Buffer.html"> <link title="Build_path_prefix_map" rel="Chapter" href="Build_path_prefix_map.html"> <link title="Builtin_attributes" rel="Chapter" href="Builtin_attributes.html"> <link title="Bytes" rel="Chapter" href="Bytes.html"> <link title="BytesLabels" rel="Chapter" href="BytesLabels.html"> <link title="Callback" rel="Chapter" href="Callback.html"> <link title="CamlinternalFormat" rel="Chapter" href="CamlinternalFormat.html"> <link title="CamlinternalFormatBasics" rel="Chapter" href="CamlinternalFormatBasics.html"> <link title="CamlinternalLazy" rel="Chapter" href="CamlinternalLazy.html"> <link title="CamlinternalMod" rel="Chapter" href="CamlinternalMod.html"> <link title="CamlinternalOO" rel="Chapter" href="CamlinternalOO.html"> <link title="Ccomp" rel="Chapter" href="Ccomp.html"> <link title="Char" rel="Chapter" href="Char.html"> <link title="Clflags" rel="Chapter" href="Clflags.html"> <link title="Complex" rel="Chapter" href="Complex.html"> <link title="Condition" rel="Chapter" href="Condition.html"> <link title="Config" rel="Chapter" href="Config.html"> <link title="Consistbl" rel="Chapter" href="Consistbl.html"> <link title="Depend" rel="Chapter" href="Depend.html"> <link title="Digest" rel="Chapter" href="Digest.html"> <link title="Docstrings" rel="Chapter" href="Docstrings.html"> <link title="Dynlink" rel="Chapter" href="Dynlink.html"> <link title="Ephemeron" rel="Chapter" href="Ephemeron.html"> <link title="Event" rel="Chapter" href="Event.html"> <link title="Filename" rel="Chapter" href="Filename.html"> <link title="Float" rel="Chapter" href="Float.html"> <link title="Format" rel="Chapter" href="Format.html"> <link title="Gc" rel="Chapter" href="Gc.html"> <link title="Genlex" rel="Chapter" href="Genlex.html"> <link title="Graphics" rel="Chapter" href="Graphics.html"> <link title="GraphicsX11" rel="Chapter" href="GraphicsX11.html"> <link title="Hashtbl" rel="Chapter" href="Hashtbl.html"> <link title="Identifiable" rel="Chapter" href="Identifiable.html"> <link title="Int32" rel="Chapter" href="Int32.html"> <link title="Int64" rel="Chapter" href="Int64.html"> <link title="Lazy" rel="Chapter" href="Lazy.html"> <link title="Lexer" rel="Chapter" href="Lexer.html"> <link 
title="Lexing" rel="Chapter" href="Lexing.html"> <link title="List" rel="Chapter" href="List.html"> <link title="ListLabels" rel="Chapter" href="ListLabels.html"> <link title="Location" rel="Chapter" href="Location.html"> <link title="Longident" rel="Chapter" href="Longident.html"> <link title="Map" rel="Chapter" href="Map.html"> <link title="Marshal" rel="Chapter" href="Marshal.html"> <link title="Misc" rel="Chapter" href="Misc.html"> <link title="MoreLabels" rel="Chapter" href="MoreLabels.html"> <link title="Mutex" rel="Chapter" href="Mutex.html"> <link title="Nativeint" rel="Chapter" href="Nativeint.html"> <link title="Numbers" rel="Chapter" href="Numbers.html"> <link title="Obj" rel="Chapter" href="Obj.html"> <link title="Oo" rel="Chapter" href="Oo.html"> <link title="Parse" rel="Chapter" href="Parse.html"> <link title="Parser" rel="Chapter" href="Parser.html"> <link title="Parsetree" rel="Chapter" href="Parsetree.html"> <link title="Parsing" rel="Chapter" href="Parsing.html"> <link title="Pervasives" rel="Chapter" href="Pervasives.html"> <link title="Pparse" rel="Chapter" href="Pparse.html"> <link title="Pprintast" rel="Chapter" href="Pprintast.html"> <link title="Printast" rel="Chapter" href="Printast.html"> <link title="Printexc" rel="Chapter" href="Printexc.html"> <link title="Printf" rel="Chapter" href="Printf.html"> <link title="Profile" rel="Chapter" href="Profile.html"> <link title="Queue" rel="Chapter" href="Queue.html"> <link title="Random" rel="Chapter" href="Random.html"> <link title="Scanf" rel="Chapter" href="Scanf.html"> <link title="Seq" rel="Chapter" href="Seq.html"> <link title="Set" rel="Chapter" href="Set.html"> <link title="Simplif" rel="Chapter" href="Simplif.html"> <link title="Sort" rel="Chapter" href="Sort.html"> <link title="Spacetime" rel="Chapter" href="Spacetime.html"> <link title="Stack" rel="Chapter" href="Stack.html"> <link title="StdLabels" rel="Chapter" href="StdLabels.html"> <link title="Str" rel="Chapter" href="Str.html"> <link title="Stream" rel="Chapter" href="Stream.html"> <link title="String" rel="Chapter" href="String.html"> <link title="StringLabels" rel="Chapter" href="StringLabels.html"> <link title="Strongly_connected_components" rel="Chapter" href="Strongly_connected_components.html"> <link title="Syntaxerr" rel="Chapter" href="Syntaxerr.html"> <link title="Sys" rel="Chapter" href="Sys.html"> <link title="Targetint" rel="Chapter" href="Targetint.html"> <link title="Tbl" rel="Chapter" href="Tbl.html"> <link title="Terminfo" rel="Chapter" href="Terminfo.html"> <link title="Thread" rel="Chapter" href="Thread.html"> <link title="ThreadUnix" rel="Chapter" href="ThreadUnix.html"> <link title="Typemod" rel="Chapter" href="Typemod.html"> <link title="Uchar" rel="Chapter" href="Uchar.html"> <link title="Unix" rel="Chapter" href="Unix.html"> <link title="UnixLabels" rel="Chapter" href="UnixLabels.html"> <link title="Warnings" rel="Chapter" href="Warnings.html"> <link title="Weak" rel="Chapter" href="Weak.html"><title>Docstrings</title> </head> <body> <code class="code"><span class="keyword">sig</span><br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;init&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;warn_bad_docstrings&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">type</span>&nbsp;docstring<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;docstring&nbsp;:&nbsp;string&nbsp;<span 
class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Location</span>.t&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docstring<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;register&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;docstring_body&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;string<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;docstring_loc&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Location</span>.t<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;set_pre_docstrings&nbsp;:<br> &nbsp;&nbsp;&nbsp;&nbsp;<span class="constructor">Lexing</span>.position&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;list&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;set_post_docstrings&nbsp;:<br> &nbsp;&nbsp;&nbsp;&nbsp;<span class="constructor">Lexing</span>.position&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;list&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;set_floating_docstrings&nbsp;:<br> &nbsp;&nbsp;&nbsp;&nbsp;<span class="constructor">Lexing</span>.position&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;list&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;set_pre_extra_docstrings&nbsp;:<br> &nbsp;&nbsp;&nbsp;&nbsp;<span class="constructor">Lexing</span>.position&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;list&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;set_post_extra_docstrings&nbsp;:<br> &nbsp;&nbsp;&nbsp;&nbsp;<span class="constructor">Lexing</span>.position&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;list&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">type</span>&nbsp;docs&nbsp;=&nbsp;{<br> &nbsp;&nbsp;&nbsp;&nbsp;docs_pre&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;option;<br> &nbsp;&nbsp;&nbsp;&nbsp;docs_post&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;option;<br> &nbsp;&nbsp;}<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;empty_docs&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docs<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;docs_attr&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attribute<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;add_docs_attrs&nbsp;:<br> &nbsp;&nbsp;&nbsp;&nbsp;<span class="constructor">Docstrings</span>.docs&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attributes&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attributes<br> &nbsp;&nbsp;<span 
class="keyword">val</span>&nbsp;symbol_docs&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docs<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;symbol_docs_lazy&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docs&nbsp;<span class="constructor">Lazy</span>.t<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;rhs_docs&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docs<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;rhs_docs_lazy&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.docs&nbsp;<span class="constructor">Lazy</span>.t<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;mark_symbol_docs&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;mark_rhs_docs&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;unit<br> &nbsp;&nbsp;<span class="keyword">type</span>&nbsp;info&nbsp;=&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;option<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;empty_info&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.info<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;info_attr&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attribute<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;add_info_attrs&nbsp;:<br> &nbsp;&nbsp;&nbsp;&nbsp;<span class="constructor">Docstrings</span>.info&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attributes&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attributes<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;symbol_info&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.info<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;rhs_info&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.info<br> &nbsp;&nbsp;<span class="keyword">type</span>&nbsp;text&nbsp;=&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;list<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;empty_text&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.text<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;empty_text_lazy&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.text&nbsp;<span class="constructor">Lazy</span>.t<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;text_attr&nbsp;:&nbsp;<span class="constructor">Docstrings</span>.docstring&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attribute<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;add_text_attrs&nbsp;:<br> &nbsp;&nbsp;&nbsp;&nbsp;<span class="constructor">Docstrings</span>.text&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attributes&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Parsetree</span>.attributes<br> &nbsp;&nbsp;<span 
class="keyword">val</span>&nbsp;symbol_text&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;symbol_text_lazy&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text&nbsp;<span class="constructor">Lazy</span>.t<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;rhs_text&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;rhs_text_lazy&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text&nbsp;<span class="constructor">Lazy</span>.t<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;symbol_pre_extra_text&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;symbol_post_extra_text&nbsp;:&nbsp;unit&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;rhs_pre_extra_text&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;rhs_post_extra_text&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text<br> &nbsp;&nbsp;<span class="keyword">val</span>&nbsp;rhs_post_text&nbsp;:&nbsp;int&nbsp;<span class="keywordsign">-&gt;</span>&nbsp;<span class="constructor">Docstrings</span>.text<br> <span class="keyword">end</span></code></body></html>
{ "pile_set_name": "Github" }
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

function f(a, b, c) {
  if (a == 0 || b == 0) return a;
  return a + c;
}

assertEquals(0, f(0, 0, 0));
assertEquals(0, f(0, 1, 0));
assertEquals(1, f(1, 0, 0));
assertEquals(2, f(2, 1, 0));

// Force deoptimization in --always-opt mode when evaluating
// the 'a + c' expression. Make sure this doesn't end up
// returning 'a'.
assertEquals(1.5, f(1, 1, 0.5));
assertEquals(2.5, f(2, 1, 0.5));
{ "pile_set_name": "Github" }
// TODO: it'd be great to merge it with the other canReorder functionality

var rulesOverlap = require('./rules-overlap');
var specificitiesOverlap = require('./specificities-overlap');

var FLEX_PROPERTIES = /align\-items|box\-align|box\-pack|flex|justify/;
var BORDER_PROPERTIES = /^border\-(top|right|bottom|left|color|style|width|radius)/;

function canReorder(left, right, cache) {
  for (var i = right.length - 1; i >= 0; i--) {
    for (var j = left.length - 1; j >= 0; j--) {
      if (!canReorderSingle(left[j], right[i], cache))
        return false;
    }
  }

  return true;
}

function canReorderSingle(left, right, cache) {
  var leftName = left[0];
  var leftValue = left[1];
  var leftNameRoot = left[2];
  var leftSelector = left[5];
  var leftInSpecificSelector = left[6];
  var rightName = right[0];
  var rightValue = right[1];
  var rightNameRoot = right[2];
  var rightSelector = right[5];
  var rightInSpecificSelector = right[6];

  if (leftName == 'font' && rightName == 'line-height' || rightName == 'font' && leftName == 'line-height')
    return false;
  if (FLEX_PROPERTIES.test(leftName) && FLEX_PROPERTIES.test(rightName))
    return false;
  if (leftNameRoot == rightNameRoot && unprefixed(leftName) == unprefixed(rightName) && (vendorPrefixed(leftName) ^ vendorPrefixed(rightName)))
    return false;
  if (leftNameRoot == 'border' && BORDER_PROPERTIES.test(rightNameRoot) && (leftName == 'border' || leftName == rightNameRoot || (leftValue != rightValue && sameBorderComponent(leftName, rightName))))
    return false;
  if (rightNameRoot == 'border' && BORDER_PROPERTIES.test(leftNameRoot) && (rightName == 'border' || rightName == leftNameRoot || (leftValue != rightValue && sameBorderComponent(leftName, rightName))))
    return false;
  if (leftNameRoot == 'border' && rightNameRoot == 'border' && leftName != rightName && (isSideBorder(leftName) && isStyleBorder(rightName) || isStyleBorder(leftName) && isSideBorder(rightName)))
    return false;
  if (leftNameRoot != rightNameRoot)
    return true;
  if (leftName == rightName && leftNameRoot == rightNameRoot && (leftValue == rightValue || withDifferentVendorPrefix(leftValue, rightValue)))
    return true;
  if (leftName != rightName && leftNameRoot == rightNameRoot && leftName != leftNameRoot && rightName != rightNameRoot)
    return true;
  if (leftName != rightName && leftNameRoot == rightNameRoot && leftValue == rightValue)
    return true;
  if (rightInSpecificSelector && leftInSpecificSelector && !inheritable(leftNameRoot) && !inheritable(rightNameRoot) && !rulesOverlap(rightSelector, leftSelector, false))
    return true;
  if (!specificitiesOverlap(leftSelector, rightSelector, cache))
    return true;

  return false;
}

function vendorPrefixed(name) {
  return /^\-(?:moz|webkit|ms|o)\-/.test(name);
}

function unprefixed(name) {
  return name.replace(/^\-(?:moz|webkit|ms|o)\-/, '');
}

function sameBorderComponent(name1, name2) {
  return name1.split('-').pop() == name2.split('-').pop();
}

function isSideBorder(name) {
  return name == 'border-top' || name == 'border-right' || name == 'border-bottom' || name == 'border-left';
}

function isStyleBorder(name) {
  return name == 'border-color' || name == 'border-style' || name == 'border-width';
}

function withDifferentVendorPrefix(value1, value2) {
  // compares the vendor prefix tokens of both values, e.g. '-moz-box' vs '-webkit-box'
  return vendorPrefixed(value1) && vendorPrefixed(value2) && value1.split('-')[1] != value2.split('-')[1];
}

function inheritable(name) {
  // According to http://www.w3.org/TR/CSS21/propidx.html
  // Others will be caught by other, preceding rules
  return name == 'font' || name == 'line-height' || name == 'list-style';
}

module.exports = {
  canReorder: canReorder,
  canReorderSingle: canReorderSingle
};
{ "pile_set_name": "Github" }
/* contrib/pg_trgm/pg_trgm--1.2--1.3.sql */

-- complain if script is sourced in psql, rather than via ALTER EXTENSION
\echo Use "ALTER EXTENSION pg_trgm UPDATE TO '1.3'" to load this file. \quit

-- Update procedure signatures the hard way.
-- We use to_regprocedure() so that the query doesn't fail if run against 9.6beta1 definitions,
-- wherein the signatures have been updated already.  In that case to_regprocedure() will
-- return NULL and no updates will happen.

DO LANGUAGE plpgsql
$$
DECLARE
  my_schema pg_catalog.text := pg_catalog.quote_ident(pg_catalog.current_schema());
  old_path pg_catalog.text := pg_catalog.current_setting('search_path');
BEGIN
-- for safety, transiently set search_path to just pg_catalog+pg_temp
PERFORM pg_catalog.set_config('search_path', 'pg_catalog, pg_temp', true);

UPDATE pg_catalog.pg_proc SET
  proargtypes = pg_catalog.array_to_string(newtypes::pg_catalog.oid[], ' ')::pg_catalog.oidvector,
  pronargs = pg_catalog.array_length(newtypes, 1)
FROM (VALUES
  (NULL::pg_catalog.text, NULL::pg_catalog.text[]), -- establish column types
  ('gtrgm_consistent(internal,text,int4,oid,internal)', '{internal,text,int2,oid,internal}'),
  ('gtrgm_distance(internal,text,int4,oid)', '{internal,text,int2,oid,internal}'),
  ('gtrgm_union(bytea,internal)', '{internal,internal}')
) AS update_data (oldproc, newtypestext),
LATERAL (
  SELECT array_agg(replace(typ, 'SCH', my_schema)::regtype) as newtypes FROM unnest(newtypestext) typ
) ls
WHERE oid = to_regprocedure(my_schema || '.' || replace(oldproc, 'SCH', my_schema));

UPDATE pg_catalog.pg_proc SET
  prorettype = (my_schema || '.gtrgm')::pg_catalog.regtype
WHERE oid = pg_catalog.to_regprocedure(my_schema || '.gtrgm_union(internal,internal)');

PERFORM pg_catalog.set_config('search_path', old_path, true);
END
$$;

ALTER FUNCTION set_limit(float4) PARALLEL UNSAFE;
ALTER FUNCTION show_limit() PARALLEL SAFE;
ALTER FUNCTION show_trgm(text) PARALLEL SAFE;
ALTER FUNCTION similarity(text, text) PARALLEL SAFE;
ALTER FUNCTION similarity_op(text, text) PARALLEL SAFE;
ALTER FUNCTION word_similarity(text, text) PARALLEL SAFE;
ALTER FUNCTION word_similarity_op(text, text) PARALLEL SAFE;
ALTER FUNCTION word_similarity_commutator_op(text, text) PARALLEL SAFE;
ALTER FUNCTION similarity_dist(text, text) PARALLEL SAFE;
ALTER FUNCTION word_similarity_dist_op(text, text) PARALLEL SAFE;
ALTER FUNCTION word_similarity_dist_commutator_op(text, text) PARALLEL SAFE;
ALTER FUNCTION gtrgm_in(cstring) PARALLEL SAFE;
ALTER FUNCTION gtrgm_out(gtrgm) PARALLEL SAFE;
ALTER FUNCTION gtrgm_consistent(internal, text, smallint, oid, internal) PARALLEL SAFE;
ALTER FUNCTION gtrgm_distance(internal, text, smallint, oid, internal) PARALLEL SAFE;
ALTER FUNCTION gtrgm_compress(internal) PARALLEL SAFE;
ALTER FUNCTION gtrgm_decompress(internal) PARALLEL SAFE;
ALTER FUNCTION gtrgm_penalty(internal, internal, internal) PARALLEL SAFE;
ALTER FUNCTION gtrgm_picksplit(internal, internal) PARALLEL SAFE;
ALTER FUNCTION gtrgm_union(internal, internal) PARALLEL SAFE;
ALTER FUNCTION gtrgm_same(gtrgm, gtrgm, internal) PARALLEL SAFE;
ALTER FUNCTION gin_extract_value_trgm(text, internal) PARALLEL SAFE;
ALTER FUNCTION gin_extract_query_trgm(text, internal, int2, internal, internal, internal, internal) PARALLEL SAFE;
ALTER FUNCTION gin_trgm_consistent(internal, int2, text, int4, internal, internal, internal, internal) PARALLEL SAFE;
ALTER FUNCTION gin_trgm_triconsistent(internal, int2, text, int4, internal, internal, internal) PARALLEL SAFE;
{ "pile_set_name": "Github" }
CREATE TABLE list (id VARCHAR(2) NOT NULL, value VARCHAR(64) NOT NULL, PRIMARY KEY(id)) DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci ENGINE = InnoDB; INSERT INTO `list` (`id`, `value`) VALUES ('af', 'Afrikaans Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('af_NA', 'Afrikaans Simi (Namibia)'); INSERT INTO `list` (`id`, `value`) VALUES ('af_ZA', 'Afrikaans Simi (Sudáfrica)'); INSERT INTO `list` (`id`, `value`) VALUES ('ak', 'Akan'); INSERT INTO `list` (`id`, `value`) VALUES ('ak_GH', 'Akan (Ghana)'); INSERT INTO `list` (`id`, `value`) VALUES ('sq', 'Albanes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('sq_AL', 'Albanes Simi (Albania)'); INSERT INTO `list` (`id`, `value`) VALUES ('sq_MK', 'Albanes Simi (ERY Macedonia)'); INSERT INTO `list` (`id`, `value`) VALUES ('sq_XK', 'Albanian (Kosovo)'); INSERT INTO `list` (`id`, `value`) VALUES ('de', 'Aleman Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('de_DE', 'Aleman Simi (Alemania)'); INSERT INTO `list` (`id`, `value`) VALUES ('de_AT', 'Aleman Simi (Austria)'); INSERT INTO `list` (`id`, `value`) VALUES ('de_BE', 'Aleman Simi (Bélgica)'); INSERT INTO `list` (`id`, `value`) VALUES ('de_LI', 'Aleman Simi (Liechtenstein)'); INSERT INTO `list` (`id`, `value`) VALUES ('de_LU', 'Aleman Simi (Luxemburgo)'); INSERT INTO `list` (`id`, `value`) VALUES ('de_CH', 'Aleman Simi (Suiza)'); INSERT INTO `list` (`id`, `value`) VALUES ('am', 'Amarico Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('am_ET', 'Amarico Simi (Etiopía)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar', 'Arabe Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_SA', 'Arabe Simi (Arabia Saudí)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_DZ', 'Arabe Simi (Argelia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_BH', 'Arabe Simi (Bahréin)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_TD', 'Arabe Simi (Chad)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_KM', 'Arabe Simi (Comoras)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_EG', 'Arabe Simi (Egipto)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_ER', 'Arabe Simi (Eritrea)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_IQ', 'Arabe Simi (Iraq)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_IL', 'Arabe Simi (Israel)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_JO', 'Arabe Simi (Jordania)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_KW', 'Arabe Simi (Kuwait)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_LB', 'Arabe Simi (Líbano)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_MA', 'Arabe Simi (Marruecos)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_MR', 'Arabe Simi (Mauritania)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_OM', 'Arabe Simi (Omán)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_PS', 'Arabe Simi (Palestina Kamachikuq)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_QA', 'Arabe Simi (Qatar)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_SY', 'Arabe Simi (Siria)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_SO', 'Arabe Simi (Somalia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_SS', 'Arabe Simi (Sudán del Sur)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_SD', 'Arabe Simi (Sudán)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_TN', 'Arabe Simi (Túnez)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_YE', 'Arabe Simi (Yemen)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_DJ', 'Arabe Simi (Yibuti)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_LY', 'Arabic (Libya)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_AE', 'Arabic 
(United Arab Emirates)'); INSERT INTO `list` (`id`, `value`) VALUES ('ar_EH', 'Arabic (Western Sahara)'); INSERT INTO `list` (`id`, `value`) VALUES ('hy', 'Armenio Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('hy_AM', 'Armenio Simi (Armenia)'); INSERT INTO `list` (`id`, `value`) VALUES ('as', 'Asames Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('as_IN', 'Asames Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('az_Cyrl_AZ', 'Azerbaijani (Cyrillic, Azerbaijan)'); INSERT INTO `list` (`id`, `value`) VALUES ('az_Cyrl', 'Azerbaijani (Cyrillic)'); INSERT INTO `list` (`id`, `value`) VALUES ('az_Latn_AZ', 'Azerbaijani (Latin, Azerbaijan)'); INSERT INTO `list` (`id`, `value`) VALUES ('az_Latn', 'Azerbaijani (Latin)'); INSERT INTO `list` (`id`, `value`) VALUES ('az', 'Azerbaiyano Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('az_AZ', 'Azerbaiyano Simi (Azerbaiyán)'); INSERT INTO `list` (`id`, `value`) VALUES ('bm', 'Bambara'); INSERT INTO `list` (`id`, `value`) VALUES ('bm_Latn_ML', 'Bambara (Latin, Mali)'); INSERT INTO `list` (`id`, `value`) VALUES ('bm_Latn', 'Bambara (Latin)'); INSERT INTO `list` (`id`, `value`) VALUES ('bn', 'Bangla Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('bn_BD', 'Bangla Simi (Bangladesh)'); INSERT INTO `list` (`id`, `value`) VALUES ('bn_IN', 'Bangla Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('be', 'Bielorruso Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('be_BY', 'Bielorruso Simi (Belarús)'); INSERT INTO `list` (`id`, `value`) VALUES ('bs_BA', 'Bosnian (Bosnia & Herzegovina)'); INSERT INTO `list` (`id`, `value`) VALUES ('bs_Cyrl_BA', 'Bosnian (Cyrillic, Bosnia & Herzegovina)'); INSERT INTO `list` (`id`, `value`) VALUES ('bs_Cyrl', 'Bosnian (Cyrillic)'); INSERT INTO `list` (`id`, `value`) VALUES ('bs_Latn_BA', 'Bosnian (Latin, Bosnia & Herzegovina)'); INSERT INTO `list` (`id`, `value`) VALUES ('bs_Latn', 'Bosnian (Latin)'); INSERT INTO `list` (`id`, `value`) VALUES ('bs', 'Bosnio Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('br', 'Breton Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('br_FR', 'Breton Simi (Francia)'); INSERT INTO `list` (`id`, `value`) VALUES ('bg', 'Bulgaro Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('bg_BG', 'Bulgaro Simi (Bulgaria)'); INSERT INTO `list` (`id`, `value`) VALUES ('my', 'Burmese'); INSERT INTO `list` (`id`, `value`) VALUES ('my_MM', 'Burmese (Myanmar (Burma))'); INSERT INTO `list` (`id`, `value`) VALUES ('ca', 'Catalan Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ca_AD', 'Catalan Simi (Andorra)'); INSERT INTO `list` (`id`, `value`) VALUES ('ca_ES', 'Catalan Simi (España)'); INSERT INTO `list` (`id`, `value`) VALUES ('ca_FR', 'Catalan Simi (Francia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ca_IT', 'Catalan Simi (Italia)'); INSERT INTO `list` (`id`, `value`) VALUES ('cs', 'Checo Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('se', 'Chincha Sami Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('se_FI', 'Chincha Sami Simi (Finlandia)'); INSERT INTO `list` (`id`, `value`) VALUES ('se_NO', 'Chincha Sami Simi (Noruega)'); INSERT INTO `list` (`id`, `value`) VALUES ('se_SE', 'Chincha Sami Simi (Suecia)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hans_CN', 'Chinese (Simplified, China)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hans_HK', 'Chinese (Simplified, Hong Kong SAR China)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hans_MO', 'Chinese (Simplified, Macau SAR China)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hans_SG', 'Chinese 
(Simplified, Singapore)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hans', 'Chinese (Simplified)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_TW', 'Chinese (Taiwan)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hant_HK', 'Chinese (Traditional, Hong Kong SAR China)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hant_MO', 'Chinese (Traditional, Macau SAR China)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hant_TW', 'Chinese (Traditional, Taiwan)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_Hant', 'Chinese (Traditional)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh', 'Chino Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_CN', 'Chino Simi (China)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_HK', 'Chino Simi (Hong Kong (RAE))'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_MO', 'Chino Simi (Macao RAE)'); INSERT INTO `list` (`id`, `value`) VALUES ('zh_SG', 'Chino Simi (Singapur)'); INSERT INTO `list` (`id`, `value`) VALUES ('si', 'Cingales Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('si_LK', 'Cingales Simi (Sri Lanka)'); INSERT INTO `list` (`id`, `value`) VALUES ('ko', 'Coreano Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ko_KP', 'Coreano Simi (Corea del Norte)'); INSERT INTO `list` (`id`, `value`) VALUES ('ko_KR', 'Coreano Simi (Corea del Sur)'); INSERT INTO `list` (`id`, `value`) VALUES ('kw', 'Cornish'); INSERT INTO `list` (`id`, `value`) VALUES ('kw_GB', 'Cornish (United Kingdom)'); INSERT INTO `list` (`id`, `value`) VALUES ('hr', 'Croata Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('hr_HR', 'Croata Simi (Croacia)'); INSERT INTO `list` (`id`, `value`) VALUES ('hr_BA', 'Croatian (Bosnia & Herzegovina)'); INSERT INTO `list` (`id`, `value`) VALUES ('cs_CZ', 'Czech (Czech Republic)'); INSERT INTO `list` (`id`, `value`) VALUES ('da', 'Danes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('da_DK', 'Danes Simi (Dinamarca)'); INSERT INTO `list` (`id`, `value`) VALUES ('da_GL', 'Danish (Greenland)'); INSERT INTO `list` (`id`, `value`) VALUES ('nl_AW', 'Dutch (Aruba)'); INSERT INTO `list` (`id`, `value`) VALUES ('dz', 'Dzongkha'); INSERT INTO `list` (`id`, `value`) VALUES ('dz_BT', 'Dzongkha (Bhutan)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_AI', 'English (Anguilla)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_AG', 'English (Antigua & Barbuda)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_BB', 'English (Barbados)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_BZ', 'English (Belize)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_BM', 'English (Bermuda)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_IO', 'English (British Indian Ocean Territory)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_VG', 'English (British Virgin Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_CA', 'English (Canada)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_KY', 'English (Cayman Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_CK', 'English (Cook Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_DG', 'English (Diego Garcia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_FK', 'English (Falkland Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_GI', 'English (Gibraltar)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_GD', 'English (Grenada)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_IE', 'English (Ireland)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_IM', 'English (Isle of Man)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_JM', 'English (Jamaica)'); INSERT INTO `list` 
(`id`, `value`) VALUES ('en_MY', 'English (Malaysia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_MS', 'English (Montserrat)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_NZ', 'English (New Zealand)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_NU', 'English (Niue)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SB', 'English (Solomon Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SH', 'English (St. Helena)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_LC', 'English (St. Lucia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_VC', 'English (St. Vincent & Grenadines)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_TK', 'English (Tokelau)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_TC', 'English (Turks & Caicos Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_TV', 'English (Tuvalu)'); INSERT INTO `list` (`id`, `value`) VALUES ('sk', 'Eslovaco Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('sk_SK', 'Eslovaco Simi (Eslovaquia)'); INSERT INTO `list` (`id`, `value`) VALUES ('sl', 'Esloveno Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('sl_SI', 'Esloveno Simi (Eslovenia)'); INSERT INTO `list` (`id`, `value`) VALUES ('es', 'Español Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('es_AR', 'Español Simi (Argentina)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_BO', 'Español Simi (Bolivia)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_CL', 'Español Simi (Chile)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_CO', 'Español Simi (Colombia)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_CR', 'Español Simi (Costa Rica)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_CU', 'Español Simi (Cuba)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_EC', 'Español Simi (Ecuador)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_SV', 'Español Simi (El Salvador)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_ES', 'Español Simi (España)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_US', 'Español Simi (Estados Unidos)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_PH', 'Español Simi (Filipinas)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_GT', 'Español Simi (Guatemala)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_GQ', 'Español Simi (Guinea Ecuatorial)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_HN', 'Español Simi (Honduras)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_MX', 'Español Simi (México)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_NI', 'Español Simi (Nicaragua)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_PA', 'Español Simi (Panamá)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_PY', 'Español Simi (Paraguay)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_PE', 'Español Simi (Perú)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_PR', 'Español Simi (Puerto Rico)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_UY', 'Español Simi (Uruguay)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_VE', 'Español Simi (Venezuela)'); INSERT INTO `list` (`id`, `value`) VALUES ('eo', 'Esperanto'); INSERT INTO `list` (`id`, `value`) VALUES ('et', 'Estonio Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('et_EE', 'Estonio Simi (Estonia)'); INSERT INTO `list` (`id`, `value`) VALUES ('eu', 'Euskera Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('eu_ES', 'Euskera Simi (España)'); INSERT INTO `list` (`id`, `value`) VALUES ('ee', 'Ewe'); INSERT INTO `list` (`id`, `value`) VALUES ('ee_GH', 'Ewe (Ghana)'); INSERT INTO `list` (`id`, `value`) VALUES ('ee_TG', 'Ewe (Togo)'); INSERT INTO `list` (`id`, 
`value`) VALUES ('fo_FO', 'Faroese (Faroe Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('fo', 'Feroes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('fi', 'Fines Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('fi_FI', 'Fines Simi (Finlandia)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr', 'Frances Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_DZ', 'Frances Simi (Argelia)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_BE', 'Frances Simi (Bélgica)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_BJ', 'Frances Simi (Benín)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_BI', 'Frances Simi (Burundi)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_CM', 'Frances Simi (Camerún)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_TD', 'Frances Simi (Chad)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_KM', 'Frances Simi (Comoras)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_CD', 'Frances Simi (Congo (RDC))'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_CG', 'Frances Simi (Congo)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_CI', 'Frances Simi (Côte dʼIvoire)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_FR', 'Frances Simi (Francia)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_GA', 'Frances Simi (Gabón)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_GQ', 'Frances Simi (Guinea Ecuatorial)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_GN', 'Frances Simi (Guinea)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_HT', 'Frances Simi (Haití)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_LU', 'Frances Simi (Luxemburgo)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_MG', 'Frances Simi (Madagascar)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_ML', 'Frances Simi (Malí)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_MA', 'Frances Simi (Marruecos)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_MU', 'Frances Simi (Mauricio)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_MR', 'Frances Simi (Mauritania)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_MC', 'Frances Simi (Mónaco)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_NE', 'Frances Simi (Níger)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_NC', 'Frances Simi (Nueva Caledonia)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_PF', 'Frances Simi (Polinesia Francesa)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_RW', 'Frances Simi (Ruanda)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_MF', 'Frances Simi (San Martín)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_PM', 'Frances Simi (San Pedro y Miquelón)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_SN', 'Frances Simi (Senegal)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_SC', 'Frances Simi (Seychelles)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_SY', 'Frances Simi (Siria)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_CH', 'Frances Simi (Suiza)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_TG', 'Frances Simi (Togo)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_TN', 'Frances Simi (Túnez)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_VU', 'Frances Simi (Vanuatu)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_WF', 'Frances Simi (Wallis, Futuna)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_DJ', 'Frances Simi (Yibuti)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_BF', 'French (Burkina Faso)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_CA', 'French (Canada)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_CF', 'French (Central African Republic)'); INSERT INTO 
`list` (`id`, `value`) VALUES ('fr_GF', 'French (French Guiana)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_GP', 'French (Guadeloupe)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_MQ', 'French (Martinique)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_YT', 'French (Mayotte)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_RE', 'French (Réunion)'); INSERT INTO `list` (`id`, `value`) VALUES ('fr_BL', 'French (St. Barthélemy)'); INSERT INTO `list` (`id`, `value`) VALUES ('fy', 'Frison Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('fy_NL', 'Frison Simi (Países Bajos)'); INSERT INTO `list` (`id`, `value`) VALUES ('ff', 'Fulah Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ff_CM', 'Fulah Simi (Camerún)'); INSERT INTO `list` (`id`, `value`) VALUES ('ff_GN', 'Fulah Simi (Guinea)'); INSERT INTO `list` (`id`, `value`) VALUES ('ff_MR', 'Fulah Simi (Mauritania)'); INSERT INTO `list` (`id`, `value`) VALUES ('ff_SN', 'Fulah Simi (Senegal)'); INSERT INTO `list` (`id`, `value`) VALUES ('gd', 'Gaelico Escoces Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('gd_GB', 'Gaelico Escoces Simi (Reino Unido)'); INSERT INTO `list` (`id`, `value`) VALUES ('cy', 'Gales Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('cy_GB', 'Gales Simi (Reino Unido)'); INSERT INTO `list` (`id`, `value`) VALUES ('gl', 'Gallego Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('gl_ES', 'Gallego Simi (España)'); INSERT INTO `list` (`id`, `value`) VALUES ('lg', 'Ganda'); INSERT INTO `list` (`id`, `value`) VALUES ('lg_UG', 'Ganda (Uganda)'); INSERT INTO `list` (`id`, `value`) VALUES ('ka_GE', 'Georgian (Georgia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ka', 'Georgiano Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('el', 'Griego Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('el_CY', 'Griego Simi (Chipre)'); INSERT INTO `list` (`id`, `value`) VALUES ('el_GR', 'Griego Simi (Grecia)'); INSERT INTO `list` (`id`, `value`) VALUES ('kl', 'Groenlandes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('gu', 'Gujarati Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('gu_IN', 'Gujarati Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('ha_Latn_GH', 'Hausa (Latin, Ghana)'); INSERT INTO `list` (`id`, `value`) VALUES ('ha_Latn_NE', 'Hausa (Latin, Niger)'); INSERT INTO `list` (`id`, `value`) VALUES ('ha_Latn_NG', 'Hausa (Latin, Nigeria)'); INSERT INTO `list` (`id`, `value`) VALUES ('ha_Latn', 'Hausa (Latin)'); INSERT INTO `list` (`id`, `value`) VALUES ('ha', 'Hausa Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ha_GH', 'Hausa Simi (Ghana)'); INSERT INTO `list` (`id`, `value`) VALUES ('ha_NE', 'Hausa Simi (Níger)'); INSERT INTO `list` (`id`, `value`) VALUES ('ha_NG', 'Hausa Simi (Nigeria)'); INSERT INTO `list` (`id`, `value`) VALUES ('he', 'Hebreo Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('he_IL', 'Hebreo Simi (Israel)'); INSERT INTO `list` (`id`, `value`) VALUES ('hi', 'Hindi Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('hi_IN', 'Hindi Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('hu_HU', 'Hungarian (Hungary)'); INSERT INTO `list` (`id`, `value`) VALUES ('hu', 'Hungaro Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ig', 'Igbo Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ig_NG', 'Igbo Simi (Nigeria)'); INSERT INTO `list` (`id`, `value`) VALUES ('id', 'Indonesio Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('id_ID', 'Indonesio Simi (Indonesia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en', 'Ingles Simi'); INSERT INTO `list` (`id`, `value`) 
VALUES ('en_AU', 'Ingles Simi (Australia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_BS', 'Ingles Simi (Bahamas)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_BE', 'Ingles Simi (Bélgica)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_BW', 'Ingles Simi (Botsuana)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_CM', 'Ingles Simi (Camerún)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_DM', 'Ingles Simi (Dominica)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_VI', 'Ingles Simi (EE.UU. Islas Vírgenes)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_ER', 'Ingles Simi (Eritrea)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_US', 'Ingles Simi (Estados Unidos)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_PH', 'Ingles Simi (Filipinas)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_FJ', 'Ingles Simi (Fiyi)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_GM', 'Ingles Simi (Gambia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_GH', 'Ingles Simi (Ghana)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_GU', 'Ingles Simi (Guam)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_GG', 'Ingles Simi (Guernsey)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_GY', 'Ingles Simi (Guyana)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_HK', 'Ingles Simi (Hong Kong (RAE))'); INSERT INTO `list` (`id`, `value`) VALUES ('en_IN', 'Ingles Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_CX', 'Ingles Simi (Isla Christmas)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_NF', 'Ingles Simi (Isla Norfolk)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_CC', 'Ingles Simi (Islas Cocos)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_MP', 'Ingles Simi (Islas Marianas del Norte)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_MH', 'Ingles Simi (Islas Marshall)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_UM', 'Ingles Simi (Islas menores alejadas de los EE.UU.)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_PN', 'Ingles Simi (Islas Pitcairn)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_JE', 'Ingles Simi (Jersey)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_KE', 'Ingles Simi (Kenia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_KI', 'Ingles Simi (Kiribati)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_LS', 'Ingles Simi (Lesoto)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_LR', 'Ingles Simi (Liberia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_MO', 'Ingles Simi (Macao RAE)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_MG', 'Ingles Simi (Madagascar)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_MW', 'Ingles Simi (Malawi)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_MT', 'Ingles Simi (Malta)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_MU', 'Ingles Simi (Mauricio)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_FM', 'Ingles Simi (Micronesia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_NA', 'Ingles Simi (Namibia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_NR', 'Ingles Simi (Nauru)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_NG', 'Ingles Simi (Nigeria)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_PK', 'Ingles Simi (Pakistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_PW', 'Ingles Simi (Palaos)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_PG', 'Ingles Simi (Papúa Nueva Guinea)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_PR', 'Ingles Simi (Puerto Rico)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_GB', 'Ingles Simi (Reino Unido)'); INSERT INTO `list` (`id`, `value`) 
VALUES ('en_RW', 'Ingles Simi (Ruanda)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_AS', 'Ingles Simi (Samoa Americana)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_WS', 'Ingles Simi (Samoa)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_KN', 'Ingles Simi (San Cristóbal y Nieves)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SC', 'Ingles Simi (Seychelles)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SL', 'Ingles Simi (Sierra Leona)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SG', 'Ingles Simi (Singapur)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SX', 'Ingles Simi (Sint Maarten)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SZ', 'Ingles Simi (Suazilandia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_ZA', 'Ingles Simi (Sudáfrica)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SS', 'Ingles Simi (Sudán del Sur)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_SD', 'Ingles Simi (Sudán)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_TZ', 'Ingles Simi (Tanzania)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_TO', 'Ingles Simi (Tonga)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_TT', 'Ingles Simi (Trinidad y Tobago)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_UG', 'Ingles Simi (Uganda)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_VU', 'Ingles Simi (Vanuatu)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_ZM', 'Ingles Simi (Zambia)'); INSERT INTO `list` (`id`, `value`) VALUES ('en_ZW', 'Ingles Simi (Zimbabue)'); INSERT INTO `list` (`id`, `value`) VALUES ('ga_IE', 'Irish (Ireland)'); INSERT INTO `list` (`id`, `value`) VALUES ('ga', 'Irlandes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('zu', 'Isizulu Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('zu_ZA', 'Isizulu Simi (Sudáfrica)'); INSERT INTO `list` (`id`, `value`) VALUES ('is', 'Islandes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('is_IS', 'Islandes Simi (Islandia)'); INSERT INTO `list` (`id`, `value`) VALUES ('it', 'Italiano Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('it_IT', 'Italiano Simi (Italia)'); INSERT INTO `list` (`id`, `value`) VALUES ('it_SM', 'Italiano Simi (San Marino)'); INSERT INTO `list` (`id`, `value`) VALUES ('it_CH', 'Italiano Simi (Suiza)'); INSERT INTO `list` (`id`, `value`) VALUES ('ja_JP', 'Japanese (Japan)'); INSERT INTO `list` (`id`, `value`) VALUES ('ja', 'Japones Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('kl_GL', 'Kalaallisut (Greenland)'); INSERT INTO `list` (`id`, `value`) VALUES ('kn', 'Kannada Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('kn_IN', 'Kannada Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('ks', 'Kashmiri'); INSERT INTO `list` (`id`, `value`) VALUES ('ks_Arab_IN', 'Kashmiri (Arabic, India)'); INSERT INTO `list` (`id`, `value`) VALUES ('ks_Arab', 'Kashmiri (Arabic)'); INSERT INTO `list` (`id`, `value`) VALUES ('ks_IN', 'Kashmiri (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('kk', 'Kazajo Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('kk_KZ', 'Kazajo Simi (Kazajistan)'); INSERT INTO `list` (`id`, `value`) VALUES ('kk_Cyrl_KZ', 'Kazakh (Cyrillic, Kazakhstan)'); INSERT INTO `list` (`id`, `value`) VALUES ('kk_Cyrl', 'Kazakh (Cyrillic)'); INSERT INTO `list` (`id`, `value`) VALUES ('km', 'Khmer Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('km_KH', 'Khmer Simi (Camboya)'); INSERT INTO `list` (`id`, `value`) VALUES ('ki', 'Kikuyu'); INSERT INTO `list` (`id`, `value`) VALUES ('ki_KE', 'Kikuyu (Kenya)'); INSERT INTO `list` (`id`, `value`) VALUES ('rw', 'Kinyarwanda Simi'); 
INSERT INTO `list` (`id`, `value`) VALUES ('rw_RW', 'Kinyarwanda Simi (Ruanda)'); INSERT INTO `list` (`id`, `value`) VALUES ('ky', 'Kirghiz Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ky_KG', 'Kirghiz Simi (Kirguistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('ky_Cyrl_KG', 'Kyrgyz (Cyrillic, Kyrgyzstan)'); INSERT INTO `list` (`id`, `value`) VALUES ('ky_Cyrl', 'Kyrgyz (Cyrillic)'); INSERT INTO `list` (`id`, `value`) VALUES ('lo', 'Lao Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('lo_LA', 'Lao Simi (Laos)'); INSERT INTO `list` (`id`, `value`) VALUES ('lv', 'Leton Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('lv_LV', 'Leton Simi (Letonia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ln', 'Lingala'); INSERT INTO `list` (`id`, `value`) VALUES ('ln_AO', 'Lingala (Angola)'); INSERT INTO `list` (`id`, `value`) VALUES ('ln_CF', 'Lingala (Central African Republic)'); INSERT INTO `list` (`id`, `value`) VALUES ('ln_CG', 'Lingala (Congo - Brazzaville)'); INSERT INTO `list` (`id`, `value`) VALUES ('ln_CD', 'Lingala (Congo - Kinshasa)'); INSERT INTO `list` (`id`, `value`) VALUES ('lt', 'Lituano Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('lt_LT', 'Lituano Simi (Lituania)'); INSERT INTO `list` (`id`, `value`) VALUES ('lu', 'Luba-Katanga'); INSERT INTO `list` (`id`, `value`) VALUES ('lu_CD', 'Luba-Katanga (Congo - Kinshasa)'); INSERT INTO `list` (`id`, `value`) VALUES ('lb', 'Luxemburgues Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('lb_LU', 'Luxemburgues Simi (Luxemburgo)'); INSERT INTO `list` (`id`, `value`) VALUES ('mk', 'Macedonio Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('mk_MK', 'Macedonio Simi (ERY Macedonia)'); INSERT INTO `list` (`id`, `value`) VALUES ('mg', 'Malagasy'); INSERT INTO `list` (`id`, `value`) VALUES ('mg_MG', 'Malagasy (Madagascar)'); INSERT INTO `list` (`id`, `value`) VALUES ('ms_Latn_BN', 'Malay (Latin, Brunei)'); INSERT INTO `list` (`id`, `value`) VALUES ('ms_Latn_MY', 'Malay (Latin, Malaysia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ms_Latn_SG', 'Malay (Latin, Singapore)'); INSERT INTO `list` (`id`, `value`) VALUES ('ms_Latn', 'Malay (Latin)'); INSERT INTO `list` (`id`, `value`) VALUES ('ms_MY', 'Malay (Malaysia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ml', 'Malayalam Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ml_IN', 'Malayalam Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('ms', 'Malayo Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ms_BN', 'Malayo Simi (Brunéi)'); INSERT INTO `list` (`id`, `value`) VALUES ('ms_SG', 'Malayo Simi (Singapur)'); INSERT INTO `list` (`id`, `value`) VALUES ('mt', 'Maltes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('mt_MT', 'Maltes Simi (Malta)'); INSERT INTO `list` (`id`, `value`) VALUES ('gv', 'Manx'); INSERT INTO `list` (`id`, `value`) VALUES ('gv_IM', 'Manx (Isle of Man)'); INSERT INTO `list` (`id`, `value`) VALUES ('mr', 'Marathi Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('mr_IN', 'Marathi Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('mn', 'Mongol Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('mn_Cyrl_MN', 'Mongolian (Cyrillic, Mongolia)'); INSERT INTO `list` (`id`, `value`) VALUES ('mn_Cyrl', 'Mongolian (Cyrillic)'); INSERT INTO `list` (`id`, `value`) VALUES ('mn_MN', 'Mongolian (Mongolia)'); INSERT INTO `list` (`id`, `value`) VALUES ('nl', 'Neerlandes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('nl_BE', 'Neerlandes Simi (Bélgica)'); INSERT INTO `list` (`id`, `value`) VALUES ('nl_BQ', 'Neerlandes Simi (Bonaire)'); 
INSERT INTO `list` (`id`, `value`) VALUES ('nl_CW', 'Neerlandes Simi (Curazao)'); INSERT INTO `list` (`id`, `value`) VALUES ('nl_NL', 'Neerlandes Simi (Países Bajos)'); INSERT INTO `list` (`id`, `value`) VALUES ('nl_SX', 'Neerlandes Simi (Sint Maarten)'); INSERT INTO `list` (`id`, `value`) VALUES ('nl_SR', 'Neerlandes Simi (Surinam)'); INSERT INTO `list` (`id`, `value`) VALUES ('ne', 'Nepali Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ne_IN', 'Nepali Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('ne_NP', 'Nepali Simi (Nepal)'); INSERT INTO `list` (`id`, `value`) VALUES ('nd', 'North Ndebele'); INSERT INTO `list` (`id`, `value`) VALUES ('nd_ZW', 'North Ndebele (Zimbabwe)'); INSERT INTO `list` (`id`, `value`) VALUES ('no', 'Noruego Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('no_NO', 'Noruego Simi (Noruega)'); INSERT INTO `list` (`id`, `value`) VALUES ('nb', 'Norwegian Bokmål'); INSERT INTO `list` (`id`, `value`) VALUES ('nb_NO', 'Norwegian Bokmål (Norway)'); INSERT INTO `list` (`id`, `value`) VALUES ('nb_SJ', 'Norwegian Bokmål (Svalbard & Jan Mayen)'); INSERT INTO `list` (`id`, `value`) VALUES ('nn', 'Norwegian Nynorsk'); INSERT INTO `list` (`id`, `value`) VALUES ('nn_NO', 'Norwegian Nynorsk (Norway)'); INSERT INTO `list` (`id`, `value`) VALUES ('or', 'Odia Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('or_IN', 'Odia Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('om', 'Oromo'); INSERT INTO `list` (`id`, `value`) VALUES ('om_ET', 'Oromo (Ethiopia)'); INSERT INTO `list` (`id`, `value`) VALUES ('om_KE', 'Oromo (Kenya)'); INSERT INTO `list` (`id`, `value`) VALUES ('os', 'Ossetic'); INSERT INTO `list` (`id`, `value`) VALUES ('os_GE', 'Ossetic (Georgia)'); INSERT INTO `list` (`id`, `value`) VALUES ('os_RU', 'Ossetic (Russia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ps', 'Pashto Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ps_AF', 'Pashto Simi (Afganistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('fa', 'Persa Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('fa_AF', 'Persa Simi (Afganistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('fa_IR', 'Persa Simi (Irán)'); INSERT INTO `list` (`id`, `value`) VALUES ('pl', 'Polaco Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('pl_PL', 'Polaco Simi (Polonia)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt', 'Portugues Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_AO', 'Portugues Simi (Angola)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_BR', 'Portugues Simi (Brasil)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_GW', 'Portugues Simi (Guinea-Bissau)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_MO', 'Portugues Simi (Macao RAE)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_MZ', 'Portugues Simi (Mozambique)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_PT', 'Portugues Simi (Portugal)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_ST', 'Portugues Simi (Santo Tomé y Príncipe)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_TL', 'Portugues Simi (Timor-Leste)'); INSERT INTO `list` (`id`, `value`) VALUES ('pt_CV', 'Portuguese (Cape Verde)'); INSERT INTO `list` (`id`, `value`) VALUES ('pa_Arab_PK', 'Punjabi (Arabic, Pakistan)'); INSERT INTO `list` (`id`, `value`) VALUES ('pa_Arab', 'Punjabi (Arabic)'); INSERT INTO `list` (`id`, `value`) VALUES ('pa_Guru_IN', 'Punjabi (Gurmukhi, India)'); INSERT INTO `list` (`id`, `value`) VALUES ('pa_Guru', 'Punjabi (Gurmukhi)'); INSERT INTO `list` (`id`, `value`) VALUES ('pa', 'Punyabi Simi'); INSERT INTO `list` (`id`, 
`value`) VALUES ('pa_IN', 'Punyabi Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('pa_PK', 'Punyabi Simi (Pakistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('rm', 'Romanche Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('rm_CH', 'Romanche Simi (Suiza)'); INSERT INTO `list` (`id`, `value`) VALUES ('ro_RO', 'Romanian (Romania)'); INSERT INTO `list` (`id`, `value`) VALUES ('ro', 'Rumano Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ro_MD', 'Rumano Simi (Moldova)'); INSERT INTO `list` (`id`, `value`) VALUES ('qu', 'Runasimi'); INSERT INTO `list` (`id`, `value`) VALUES ('qu_BO', 'Runasimi (Bolivia)'); INSERT INTO `list` (`id`, `value`) VALUES ('qu_EC', 'Runasimi (Ecuador)'); INSERT INTO `list` (`id`, `value`) VALUES ('qu_PE', 'Runasimi (Perú)'); INSERT INTO `list` (`id`, `value`) VALUES ('rn', 'Rundi'); INSERT INTO `list` (`id`, `value`) VALUES ('rn_BI', 'Rundi (Burundi)'); INSERT INTO `list` (`id`, `value`) VALUES ('ru', 'Ruso Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ru_BY', 'Ruso Simi (Belarús)'); INSERT INTO `list` (`id`, `value`) VALUES ('ru_KZ', 'Ruso Simi (Kazajistan)'); INSERT INTO `list` (`id`, `value`) VALUES ('ru_KG', 'Ruso Simi (Kirguistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('ru_MD', 'Ruso Simi (Moldova)'); INSERT INTO `list` (`id`, `value`) VALUES ('ru_RU', 'Ruso Simi (Rusia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ru_UA', 'Russian (Ukraine)'); INSERT INTO `list` (`id`, `value`) VALUES ('sg', 'Sango'); INSERT INTO `list` (`id`, `value`) VALUES ('sg_CF', 'Sango (Central African Republic)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_BA', 'Serbian (Bosnia & Herzegovina)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Cyrl_BA', 'Serbian (Cyrillic, Bosnia & Herzegovina)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Cyrl_XK', 'Serbian (Cyrillic, Kosovo)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Cyrl_ME', 'Serbian (Cyrillic, Montenegro)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Cyrl_RS', 'Serbian (Cyrillic, Serbia)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Cyrl', 'Serbian (Cyrillic)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_XK', 'Serbian (Kosovo)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Latn_BA', 'Serbian (Latin, Bosnia & Herzegovina)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Latn_XK', 'Serbian (Latin, Kosovo)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Latn_ME', 'Serbian (Latin, Montenegro)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Latn_RS', 'Serbian (Latin, Serbia)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_Latn', 'Serbian (Latin)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_ME', 'Serbian (Montenegro)'); INSERT INTO `list` (`id`, `value`) VALUES ('sr', 'Serbio Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('sr_RS', 'Serbio Simi (Serbia)'); INSERT INTO `list` (`id`, `value`) VALUES ('sh', 'Serbo-Croatian'); INSERT INTO `list` (`id`, `value`) VALUES ('sh_BA', 'Serbo-Croatian (Bosnia & Herzegovina)'); INSERT INTO `list` (`id`, `value`) VALUES ('sn', 'Shona'); INSERT INTO `list` (`id`, `value`) VALUES ('sn_ZW', 'Shona (Zimbabwe)'); INSERT INTO `list` (`id`, `value`) VALUES ('so', 'Somali'); INSERT INTO `list` (`id`, `value`) VALUES ('so_DJ', 'Somali (Djibouti)'); INSERT INTO `list` (`id`, `value`) VALUES ('so_ET', 'Somali (Ethiopia)'); INSERT INTO `list` (`id`, `value`) VALUES ('so_KE', 'Somali (Kenya)'); INSERT INTO `list` (`id`, `value`) VALUES ('so_SO', 'Somali (Somalia)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_IC', 'Spanish (Canary 
Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_EA', 'Spanish (Ceuta & Melilla)'); INSERT INTO `list` (`id`, `value`) VALUES ('es_DO', 'Spanish (Dominican Republic)'); INSERT INTO `list` (`id`, `value`) VALUES ('sw', 'Suajili Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('sw_KE', 'Suajili Simi (Kenia)'); INSERT INTO `list` (`id`, `value`) VALUES ('sw_TZ', 'Suajili Simi (Tanzania)'); INSERT INTO `list` (`id`, `value`) VALUES ('sw_UG', 'Suajili Simi (Uganda)'); INSERT INTO `list` (`id`, `value`) VALUES ('sv', 'Sueco Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('sv_FI', 'Sueco Simi (Finlandia)'); INSERT INTO `list` (`id`, `value`) VALUES ('sv_SE', 'Sueco Simi (Suecia)'); INSERT INTO `list` (`id`, `value`) VALUES ('sv_AX', 'Swedish (Åland Islands)'); INSERT INTO `list` (`id`, `value`) VALUES ('tl', 'Tagalog'); INSERT INTO `list` (`id`, `value`) VALUES ('tl_PH', 'Tagalog (Philippines)'); INSERT INTO `list` (`id`, `value`) VALUES ('th', 'Tailandes Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('th_TH', 'Tailandes Simi (Tailandia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ta_MY', 'Tamil (Malaysia)'); INSERT INTO `list` (`id`, `value`) VALUES ('ta', 'Tamil Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ta_IN', 'Tamil Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('ta_SG', 'Tamil Simi (Singapur)'); INSERT INTO `list` (`id`, `value`) VALUES ('ta_LK', 'Tamil Simi (Sri Lanka)'); INSERT INTO `list` (`id`, `value`) VALUES ('te', 'Telugu Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('te_IN', 'Telugu Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('bo', 'Tibetano Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('bo_CN', 'Tibetano Simi (China)'); INSERT INTO `list` (`id`, `value`) VALUES ('bo_IN', 'Tibetano Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('ti', 'Tigriña Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ti_ER', 'Tigriña Simi (Eritrea)'); INSERT INTO `list` (`id`, `value`) VALUES ('ti_ET', 'Tigriña Simi (Etiopía)'); INSERT INTO `list` (`id`, `value`) VALUES ('to', 'Tongan'); INSERT INTO `list` (`id`, `value`) VALUES ('to_TO', 'Tongan (Tonga)'); INSERT INTO `list` (`id`, `value`) VALUES ('tr', 'Turco Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('tr_CY', 'Turco Simi (Chipre)'); INSERT INTO `list` (`id`, `value`) VALUES ('tr_TR', 'Turco Simi (Turquía)'); INSERT INTO `list` (`id`, `value`) VALUES ('uk', 'Ucraniano Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ug', 'Uigur Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ug_CN', 'Uigur Simi (China)'); INSERT INTO `list` (`id`, `value`) VALUES ('uk_UA', 'Ukrainian (Ukraine)'); INSERT INTO `list` (`id`, `value`) VALUES ('ur', 'Urdu Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ur_IN', 'Urdu Simi (India)'); INSERT INTO `list` (`id`, `value`) VALUES ('ur_PK', 'Urdu Simi (Pakistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('ug_Arab_CN', 'Uyghur (Arabic, China)'); INSERT INTO `list` (`id`, `value`) VALUES ('ug_Arab', 'Uyghur (Arabic)'); INSERT INTO `list` (`id`, `value`) VALUES ('uz_Arab_AF', 'Uzbek (Arabic, Afghanistan)'); INSERT INTO `list` (`id`, `value`) VALUES ('uz_Arab', 'Uzbek (Arabic)'); INSERT INTO `list` (`id`, `value`) VALUES ('uz_Cyrl_UZ', 'Uzbek (Cyrillic, Uzbekistan)'); INSERT INTO `list` (`id`, `value`) VALUES ('uz_Cyrl', 'Uzbek (Cyrillic)'); INSERT INTO `list` (`id`, `value`) VALUES ('uz_Latn_UZ', 'Uzbek (Latin, Uzbekistan)'); INSERT INTO `list` (`id`, `value`) VALUES ('uz_Latn', 'Uzbek (Latin)'); INSERT INTO `list` (`id`, `value`) 
VALUES ('uz', 'Uzbeko Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('uz_AF', 'Uzbeko Simi (Afganistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('uz_UZ', 'Uzbeko Simi (Uzbekistán)'); INSERT INTO `list` (`id`, `value`) VALUES ('vi', 'Vietnamita Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('vi_VN', 'Vietnamita Simi (Vietnam)'); INSERT INTO `list` (`id`, `value`) VALUES ('ii', 'Yi Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('ii_CN', 'Yi Simi (China)'); INSERT INTO `list` (`id`, `value`) VALUES ('yi', 'Yiddish'); INSERT INTO `list` (`id`, `value`) VALUES ('yo', 'Yoruba Simi'); INSERT INTO `list` (`id`, `value`) VALUES ('yo_BJ', 'Yoruba Simi (Benín)'); INSERT INTO `list` (`id`, `value`) VALUES ('yo_NG', 'Yoruba Simi (Nigeria)');
{ "pile_set_name": "Github" }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.core.client.replication;

/**
 * The peer was not found
 */
public class PeerNotFoundException extends Exception {
  private static final long serialVersionUID = 1L;

  public PeerNotFoundException(String peer) {
    this(peer, (String) null);
  }

  public PeerNotFoundException(String peer, String message) {
    super("Peer '" + peer + "' not found " + (message == null || message.isEmpty() ? "" : message));
  }

  public PeerNotFoundException(String message, Throwable cause) {
    super(message, cause);
  }

  public PeerNotFoundException(String peer, String message, Throwable cause) {
    super("Peer '" + peer + "' not found " + message, cause);
  }
}
{ "pile_set_name": "Github" }
<?php

namespace Pimcore\Tests\Cache\Adapter\SymfonyProxy;

use Cache\IntegrationTests\TaggableCachePoolTest;
use Pimcore\Cache\Pool\PimcoreCacheItemPoolInterface;
use Pimcore\Tests\Cache\Factory;
use Pimcore\Tests\Cache\Pool\SymfonyProxy\Traits\SymfonyProxyTestTrait;
use Pimcore\Tests\Cache\Pool\Traits\CacheItemPoolTestTrait;

/**
 * @group cache.core.array
 */
class TaggableArrayAdapterProxyTest extends TaggableCachePoolTest
{
    use SymfonyProxyTestTrait;
    use CacheItemPoolTestTrait;

    protected $skippedTests = [
        'testPreviousTag'              => 'Previous tags are not loaded for performance reasons.',
        'testPreviousTagDeferred'      => 'Previous tags are not loaded for performance reasons.',
        'testTagAccessorDuplicateTags' => 'Previous tags are not loaded for performance reasons.',
    ];

    /**
     * @return PimcoreCacheItemPoolInterface
     */
    protected function buildCachePool()
    {
        return (new Factory())->createArrayAdapterProxyItemPool($this->defaultLifetime);
    }
}
{ "pile_set_name": "Github" }
// give it a tarball and a path, and it'll dump the contents

module.exports = Extract

var tar = require("../tar.js")
  , fstream = require("fstream")
  , inherits = require("inherits")
  , path = require("path")

function Extract (opts) {
  if (!(this instanceof Extract)) return new Extract(opts)
  tar.Parse.apply(this)

  if (typeof opts !== "object") {
    opts = { path: opts }
  }

  // better to drop in cwd? seems more standard.
  opts.path = opts.path || path.resolve("node-tar-extract")
  opts.type = "Directory"
  opts.Directory = true

  // similar to --strip or --strip-components
  opts.strip = +opts.strip
  if (!opts.strip || opts.strip <= 0) opts.strip = 0

  this._fst = fstream.Writer(opts)

  this.pause()
  var me = this

  // Hardlinks in tarballs are relative to the root
  // of the tarball.  So, they need to be resolved against
  // the target directory in order to be created properly.
  me.on("entry", function (entry) {
    // if there's a "strip" argument, then strip off that many
    // path components.
    if (opts.strip) {
      var p = entry.path.split("/").slice(opts.strip).join("/")
      entry.path = entry.props.path = p
      if (entry.linkpath) {
        var lp = entry.linkpath.split("/").slice(opts.strip).join("/")
        entry.linkpath = entry.props.linkpath = lp
      }
    }
    if (entry.type === "Link") {
      entry.linkpath = entry.props.linkpath =
        path.join(opts.path, path.join("/", entry.props.linkpath))
    }

    if (entry.type === "SymbolicLink") {
      var dn = path.dirname(entry.path) || ""
      var linkpath = entry.props.linkpath
      var target = path.resolve(opts.path, dn, linkpath)
      if (target.indexOf(opts.path) !== 0) {
        linkpath = path.join(opts.path, path.join("/", linkpath))
      }
      entry.linkpath = entry.props.linkpath = linkpath
    }
  })

  this._fst.on("ready", function () {
    me.pipe(me._fst, { end: false })
    me.resume()
  })

  this._fst.on('error', function(err) {
    me.emit('error', err)
  })

  this._fst.on('drain', function() {
    me.emit('drain')
  })

  // this._fst.on("end", function () {
  //   console.error("\nEEEE Extract End", me._fst.path)
  // })

  this._fst.on("close", function () {
    // console.error("\nEEEE Extract End", me._fst.path)
    me.emit("finish")
    me.emit("end")
    me.emit("close")
  })
}

inherits(Extract, tar.Parse)

Extract.prototype._streamEnd = function () {
  var me = this
  if (!me._ended || me._entry) me.error("unexpected eof")
  me._fst.end()
  // my .end() is coming later.
}
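The module above is the fstream-based extract stream of the classic node-tar package: it parses a tarball, applies the optional strip/link handling shown in the "entry" handler, and writes entries to disk. A minimal usage sketch (editor addition, not from the source) follows; "example.tar" and "./unpacked" are hypothetical names.

// Hedged usage sketch: pipe a tarball into the Extract stream defined above.
// Assumes the package is installed and exposed as require("tar").
var fs = require("fs")
var tar = require("tar")

fs.createReadStream("example.tar")
  .pipe(tar.Extract({ path: "./unpacked", strip: 1 })) // drop one leading path component
  .on("error", function (err) { console.error("extract failed:", err) })
  .on("end", function () { console.log("extraction finished") })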
{ "pile_set_name": "Github" }
package cn.rongcloud.im.viewmodel;

import android.app.Application;
import android.net.Uri;

import androidx.annotation.NonNull;
import androidx.lifecycle.AndroidViewModel;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.ViewModel;
import androidx.lifecycle.ViewModelProvider;

import cn.rongcloud.im.db.model.UserInfo;
import cn.rongcloud.im.im.IMManager;
import cn.rongcloud.im.model.Resource;
import cn.rongcloud.im.model.Result;
import cn.rongcloud.im.task.UserTask;
import cn.rongcloud.im.utils.SingleSourceLiveData;
import cn.rongcloud.im.utils.log.SLog;

public class UserInfoViewModel extends AndroidViewModel {
    private final UserTask userTask;
    private IMManager imManager;

    private SingleSourceLiveData<Resource<UserInfo>> userInfo = new SingleSourceLiveData<>();
    private SingleSourceLiveData<Resource<Result>> setNameResult = new SingleSourceLiveData<>();
    private SingleSourceLiveData<Resource<Result>> uploadPotraitResult = new SingleSourceLiveData<>();
    private SingleSourceLiveData<Resource<Result>> changePasswordResult = new SingleSourceLiveData<>();
    private SingleSourceLiveData<Resource<Result>> setStAccountResult = new SingleSourceLiveData<>();
    private SingleSourceLiveData<Resource<Result>> setGenderResult = new SingleSourceLiveData<>();

    public UserInfoViewModel(@NonNull Application application) {
        super(application);
        imManager = IMManager.getInstance();
        userTask = new UserTask(application);
        requestUserInfo(imManager.getCurrentId());
    }

    public UserInfoViewModel(String userId, @NonNull Application application) {
        super(application);
        userTask = new UserTask(application);
        requestUserInfo(userId);
    }

    /**
     * Gets the UserInfo.
     *
     * @return
     */
    public LiveData<Resource<UserInfo>> getUserInfo() {
        return userInfo;
    }

    /**
     * Result of setting the name.
     *
     * @return
     */
    public LiveData<Resource<Result>> getSetNameResult() {
        return setNameResult;
    }

    /**
     * Result of setting the StAccount.
     *
     * @return
     */
    public LiveData<Resource<Result>> getSetStAccountResult() {
        return setStAccountResult;
    }

    /**
     * Result of setting the gender.
     *
     * @return
     */
    public LiveData<Resource<Result>> getSetGenderResult() {
        return setGenderResult;
    }

    /**
     * Result of uploading the portrait.
     *
     * @return
     */
    public LiveData<Resource<Result>> getUploadPortraitResult() {
        return uploadPotraitResult;
    }

    /**
     * Result of changing the password.
     *
     * @return
     */
    public LiveData<Resource<Result>> getChangePasswordResult() {
        return changePasswordResult;
    }

    /**
     * Sets the user name.
     *
     * @param newName
     */
    public void setName(String newName) {
        setNameResult.setSource(userTask.setMyNickName(newName));
    }

    /**
     * Sets the user's own SealTalk account.
     *
     * @param stAccount
     */
    public void setStAccount(String stAccount) {
        setStAccountResult.setSource(userTask.setStAccount(stAccount));
    }

    /**
     * Sets the gender.
     *
     * @param gender
     */
    public void setGender(String gender) {
        setGenderResult.setSource(userTask.setGender(gender));
    }

    /**
     * Uploads the portrait.
     *
     * @param uri
     */
    public void uploadPortrait(Uri uri) {
        uploadPotraitResult.setSource(userTask.setPortrait(uri));
    }

    /**
     * Changes the password.
     *
     * @param oldPassword
     * @param newPassword
     */
    public void changePassword(String oldPassword, String newPassword) {
        changePasswordResult.setSource(userTask.changePassword(oldPassword, newPassword));
    }

    /**
     * Requests the user information.
     *
     * @param userId
     */
    private void requestUserInfo(String userId) {
        SLog.d("ss_usertask", "userId == " + userId);
        userInfo.setSource(userTask.getUserInfo(userId));
    }

    /**
     * Logs out.
     */
    public void logout() {
        imManager.logout();
        userTask.logout();
    }

    public static class Factory extends ViewModelProvider.NewInstanceFactory {
        private String userId;
        private Application application;

        public Factory(String userId, Application application) {
            this.userId = userId;
            this.application = application;
        }

        @NonNull
        @Override
        public <T extends ViewModel> T create(@NonNull Class<T> modelClass) {
            try {
                return modelClass.getConstructor(String.class, Application.class).newInstance(userId, application);
            } catch (Exception e) {
                throw new RuntimeException("Cannot create an instance of " + modelClass, e);
            }
        }
    }
}
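The nested Factory above exists so callers can construct a UserInfoViewModel for an arbitrary user id via its (String, Application) constructor. A brief usage sketch (editor addition, not from the source) is shown below; "ProfileActivity" and the "user_id" intent extra are hypothetical, only UserInfoViewModel.Factory comes from the file above.

// Hedged usage sketch: obtain the view model through the Factory and observe its LiveData.
public class ProfileActivity extends androidx.appcompat.app.AppCompatActivity {
    private UserInfoViewModel userInfoViewModel;

    @Override
    protected void onCreate(android.os.Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        String targetUserId = getIntent().getStringExtra("user_id"); // hypothetical extra key

        // Use the Factory so the (String, Application) constructor is chosen
        // instead of the default single-argument one.
        userInfoViewModel = new androidx.lifecycle.ViewModelProvider(
                this, new UserInfoViewModel.Factory(targetUserId, getApplication()))
                .get(UserInfoViewModel.class);

        // Observe the Resource<UserInfo> stream exposed by the view model.
        userInfoViewModel.getUserInfo().observe(this, resource -> {
            // update the UI from resource here
        });
    }
}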
{ "pile_set_name": "Github" }
{ "created_at": "2015-02-27T22:29:06.719801", "description": "Utilite to backup items from Google Reader", "fork": false, "full_name": "wistful/grbackup", "language": "Python", "updated_at": "2015-02-27T23:43:57.928050" }
{ "pile_set_name": "Github" }
namespace Ombi.Notifications.Templates
{
    public interface INewsletterTemplate
    {
        string LoadTemplate(string subject, string intro, string tableHtml, string logo);
    }
}
{ "pile_set_name": "Github" }
/*global QUnit*/ sap.ui.define([ "sap/ui/thirdparty/jquery", "sap/ui/core/util/reflection/JsControlTreeModifier", "sap/ui/fl/registry/ChangeRegistry", "sap/ui/fl/registry/ChangeRegistryItem", "sap/ui/fl/registry/SimpleChanges", "sap/ui/fl/changeHandler/MoveControls", "sap/ui/fl/changeHandler/AddXML", "sap/ui/fl/changeHandler/UnhideControl", "sap/ui/fl/changeHandler/HideControl", "sap/ui/fl/Layer", "sap/base/Log", "sap/ui/thirdparty/sinon-4" ], function( jQuery, JsControlTreeModifier, ChangeRegistry, ChangeRegistryItem, SimpleChanges, MoveControlsChangeHandler, AddXMLChangeHandler, UnhideControlChangeHandler, HideControlChangeHandler, Layer, Log, sinon ) { "use strict"; var sandbox = sinon.sandbox.create(); QUnit.module("sap.ui.fl.registry.ChangeRegistry", { beforeEach: function () { this.oChangeRegistry = new ChangeRegistry(); }, afterEach: function () { sandbox.restore(); } }, function() { QUnit.test("getInstance", function (assert) { var changeRegistryInstance = ChangeRegistry.getInstance(); assert.ok(changeRegistryInstance); }); QUnit.test("on load uxap changeHandler are registered", function (assert) { var changeRegistryInstance = ChangeRegistry.getInstance(); assert.ok(changeRegistryInstance._registeredItems, "sap.uxap.ObjectPageLayout"); assert.ok(changeRegistryInstance._registeredItems, "sap.uxap.ObjectPageSection"); }); QUnit.test("constructor", function (assert) { assert.ok(this.oChangeRegistry); assert.deepEqual(this.oChangeRegistry._registeredItems, {}); }); QUnit.test("addRegistryItem", function (assert) { var registryItem = { getControlType: function () { return "sap.ui.fl.DummyControl"; }, getChangeTypeName: function () { return "myChangeType"; } }; this.oChangeRegistry.addRegistryItem(registryItem); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems).length, 1); assert.strictEqual(this.oChangeRegistry._registeredItems["sap.ui.fl.DummyControl"]["myChangeType"], registryItem); }); QUnit.test("removeRegistryItem - remove complete item", function (assert) { var registryItem = { getControlType: function () { return "sap.ui.fl.DummyControl"; }, getChangeTypeName: function () { return "myChangeType"; } }; this.oChangeRegistry.addRegistryItem(registryItem); var mParam = { controlType: "sap.ui.fl.DummyControl", changeTypeName: "myChangeType" }; this.oChangeRegistry.removeRegistryItem(mParam); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems).length, 0); }); QUnit.test("removeRegistryItem - remove changetypemetadata only", function (assert) { var registryItem1 = { getControlType: function () { return "sap.ui.fl.DummyControl"; }, getChangeTypeName: function () { return "myChangeType"; } }; var registryItem2 = { getControlType: function () { return "sap.ui.fl.DummyControlGroup"; }, getChangeTypeName: function () { return "myChangeType"; } }; this.oChangeRegistry.addRegistryItem(registryItem1); this.oChangeRegistry.addRegistryItem(registryItem2); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems).length, 2); var mParam = { changeTypeName: "myChangeType" }; this.oChangeRegistry.removeRegistryItem(mParam); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems).length, 2); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems["sap.ui.fl.DummyControl"]).length, 0); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems["sap.ui.fl.DummyControlGroup"]).length, 0); }); QUnit.test("removeRegistryItem - remove complete controltype", function (assert) { var registryItem1 = { getControlType: 
function () { return "sap.ui.fl.DummyControl"; }, getChangeTypeName: function () { return "myChangeType1"; } }; var registryItem2 = { getControlType: function () { return "sap.ui.fl.DummyControlGroup"; }, getChangeTypeName: function () { return "myChangeType2"; } }; this.oChangeRegistry.addRegistryItem(registryItem1); this.oChangeRegistry.addRegistryItem(registryItem2); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems).length, 2); var mParam = { controlType: "sap.ui.fl.DummyControl" }; this.oChangeRegistry.removeRegistryItem(mParam); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems).length, 1); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems["sap.ui.fl.DummyControlGroup"]).length, 1); assert.deepEqual(this.oChangeRegistry._registeredItems["sap.ui.fl.DummyControlGroup"]["myChangeType2"], registryItem2); }); QUnit.test("_createChangeRegistryItemForSimpleChange - when we register a change with an unsupported layer in change.layers", function(assert) { var simpleDummyControlChange1 = { changeType: "myChangeType1", changeHandler: {}, // stub layers: { unsupportedLayer: true } }; assert.throws(function() { this.oChangeRegistry._createChangeRegistryItemForSimpleChange("sap.ui.fl.DummyControl1", simpleDummyControlChange1); }, "then we throw an error"); }); QUnit.test("registerChangeHandlersForControl understands 'default' as a parameter", function (assert) { var someChangeType = "someChange"; var sSomeChangeModuleName = "some/module/name"; var sHideControlChangeType = "hideControl"; var sControlType = "my.control.Implementation"; var oChangeHandlers = {}; oChangeHandlers[someChangeType] = sSomeChangeModuleName; oChangeHandlers[sHideControlChangeType] = "default"; var registerControlStub = sandbox.stub(this.oChangeRegistry, "registerControlForSimpleChange"); return this.oChangeRegistry._registerChangeHandlersForControl(sControlType, oChangeHandlers) .then(function() { assert.equal(registerControlStub.callCount, 2, "two change handlers were registered for the control"); assert.equal(registerControlStub.firstCall.args[0], sControlType, "the first registration was for the passed control"); assert.equal(registerControlStub.firstCall.args[1].changeType, someChangeType, "the some change type was registered"); assert.equal(registerControlStub.firstCall.args[1].changeHandler, sSomeChangeModuleName, "the 'some/module/name' module was registerd for the 'some change' type"); assert.equal(registerControlStub.secondCall.args[0], sControlType, "the second registration was for the passed control"); assert.equal(registerControlStub.secondCall.args[1].changeType, sHideControlChangeType, "the hideControl change type was registered"); assert.equal(registerControlStub.secondCall.args[1].changeHandler, this.oChangeRegistry._oDefaultChangeHandlers[sHideControlChangeType], "the default change handler was registerd for the 'hideControl' type"); }.bind(this)); }); QUnit.test("registerChangeHandlersForControl understands {changeHandler: 'default'} as a parameter", function (assert) { var someChangeType = "someChange"; var sSomeChangeModuleName = "some/module/name"; var sHideControlChangeType = "hideControl"; var sControlType = "my.control.Implementation"; var oChangeHandlers = {}; oChangeHandlers[someChangeType] = sSomeChangeModuleName; oChangeHandlers[sHideControlChangeType] = { changeHandler: "default" }; var registerControlStub = sandbox.stub(this.oChangeRegistry, "registerControlForSimpleChange"); return 
this.oChangeRegistry._registerChangeHandlersForControl(sControlType, oChangeHandlers) .then(function() { assert.equal(registerControlStub.callCount, 2, "two change handlers were registered for the control"); assert.equal(registerControlStub.firstCall.args[0], sControlType, "the first registration was for the passed control"); assert.equal(registerControlStub.firstCall.args[1].changeType, someChangeType, "the some change type was registered"); assert.equal(registerControlStub.firstCall.args[1].changeHandler, sSomeChangeModuleName, "the 'some/module/name' module was registerd for the 'some change' type"); assert.equal(registerControlStub.secondCall.args[0], sControlType, "the second registration was for the passed control"); assert.equal(registerControlStub.secondCall.args[1].changeType, sHideControlChangeType, "the hideControl change type was registered"); assert.equal(registerControlStub.secondCall.args[1].changeHandler, this.oChangeRegistry._oDefaultChangeHandlers[sHideControlChangeType], "the default change handler was registerd for the 'hideControl' type"); }.bind(this)); }); QUnit.test("registerChangeHandlersForControl understands a module path as a parameter", function (assert) { var sControlType = "my.control.Implementation"; var oChangeHandlers = "sap/ui/fl/test/registry/TestChangeHandlers"; var registerControlStub = sandbox.stub(this.oChangeRegistry, "registerControlForSimpleChange"); return this.oChangeRegistry._registerChangeHandlersForControl(sControlType, oChangeHandlers) .then(function() { assert.equal(registerControlStub.callCount, 2, "two change handlers were registered for the control"); assert.equal(registerControlStub.firstCall.args[0], sControlType, "the first registration was for the passed control"); assert.equal(registerControlStub.firstCall.args[1].changeType, "doSomething", "the some change type was registered"); assert.equal(registerControlStub.secondCall.args[0], sControlType, "the second registration was for the passed control"); assert.equal(registerControlStub.secondCall.args[1].changeType, "doSomethingElse", "the hideControl change type was registered"); }); }); QUnit.test("registerChangeHandlersForControl does not crash if the loading of a module path leads to an error (file not found)", function (assert) { assert.expect(3); var sControlType = "my.control.Implementation"; var oChangeHandlers = "sap/ui/fl/test/registry/DefinitelyNotAChangeHandlers"; var fnRegisterControlStub = sandbox.stub(this.oChangeRegistry, "registerControlForSimpleChange"); sandbox.stub(Log, "error").callsFake(function (sErrorMessage) { if (sErrorMessage.indexOf(sControlType) !== -1) { assert.ok(true, "then error was logged"); } }); return this.oChangeRegistry._registerChangeHandlersForControl(sControlType, oChangeHandlers) .then(function() { assert.ok(true, "the js processing continues"); assert.equal(fnRegisterControlStub.callCount, 0, "no registration was done"); }); }); QUnit.test("registerChangeHandlersForControl does not crash if the loading of a module path leads to an error (broken file)", function (assert) { var sControlType = "my.control.Implementation"; var sChangeHandler = "sap/ui/fl/test/registry/TestChangeHandlersBROKEN"; var registerControlStub = sandbox.stub(this.oChangeRegistry, "registerControlForSimpleChange"); var errorLoggingStub = sandbox.stub(Log, "error"); sandbox.stub(sap.ui, "require") .callsArgWithAsync(2, {message: "error"}); return this.oChangeRegistry._registerChangeHandlersForControl(sControlType, sChangeHandler) .then(function() { assert.ok(true, "the js 
processing continues"); assert.equal(registerControlStub.callCount, 0, "no registration was done"); assert.equal(errorLoggingStub.callCount, 1, "the error was logged"); }); }); QUnit.test("registerControlsForChanges shall add a map of controls and changes to the registry", function (assert) { var sLayer = Layer.CUSTOMER; return this.oChangeRegistry.registerControlsForChanges({ controlA: [SimpleChanges.unhideControl, SimpleChanges.hideControl], controlB: [SimpleChanges.unhideControl, SimpleChanges.hideControl] }) .then(this.oChangeRegistry.getChangeHandler.bind(this.oChangeRegistry, "unhideControl", "controlA", undefined, JsControlTreeModifier, sLayer)) .then(function (oChangeHandler) { assert.strictEqual(oChangeHandler, UnhideControlChangeHandler, "then the corresponding changehandler is registered in a new registry item."); }) .then(this.oChangeRegistry.getChangeHandler.bind(this.oChangeRegistry, "hideControl", "controlA", undefined, JsControlTreeModifier, sLayer)) .then(function (oChangeHandler) { assert.strictEqual(oChangeHandler, HideControlChangeHandler, "then the corresponding changehandler is registered in a new registry item."); }) .then(this.oChangeRegistry.getChangeHandler.bind(this.oChangeRegistry, "unhideControl", "controlB", undefined, JsControlTreeModifier, sLayer)) .then(function (oChangeHandler) { assert.strictEqual(oChangeHandler, UnhideControlChangeHandler, "then the corresponding changehandler is registered in a new registry item."); }) .then(this.oChangeRegistry.getChangeHandler.bind(this.oChangeRegistry, "hideControl", "controlB", undefined, JsControlTreeModifier, sLayer)) .then(function (oChangeHandler) { assert.strictEqual(oChangeHandler, HideControlChangeHandler, "then the corresponding changehandler is registered in a new registry item."); }); }); QUnit.test("registerControlsForChanges: when adding a propertyChange or propertyBindingChange without 'default' changeHandler", function (assert) { return this.oChangeRegistry.registerControlsForChanges({ controlA: { propertyChange: { changeHandler: {} } } }) .catch(function() { assert.ok(true, "then it should reject the promise"); }) .then(function() { return this.oChangeRegistry.registerControlsForChanges({ controlA: { propertyBindingChange: { changeHandler: {} } } }); }.bind(this)) .catch(function() { assert.ok(true, "then it should reject the promise"); }); }); QUnit.test("registerControlForSimpleChange shall do nothing if mandatory parameters are missing", function (assert) { this.oChangeRegistry.registerControlForSimpleChange(null, null); assert.strictEqual(Object.keys(this.oChangeRegistry._registeredItems).length, 0, "There shall be no registered items"); }); QUnit.test("registerControlForSimpleChange shall add a new registry item", function (assert) { var sLayer = Layer.CUSTOMER; this.oChangeRegistry.registerControlForSimpleChange("ganttChart", SimpleChanges.unhideControl); return this.oChangeRegistry.getChangeHandler("unhideControl", "ganttChart", undefined, JsControlTreeModifier, sLayer) .then(function (oChangeHandler) { assert.strictEqual(oChangeHandler, UnhideControlChangeHandler, "then the corresponding changehandler is registered in a new registry item."); }); }); QUnit.test("can determine if a given control has registered change handlers", function (assert) { var sControlType = "sap.ui.fl.DummyControl"; var registryItem = { getControlType: function () { return sControlType; }, getChangeTypeName: function () { return "myChangeType"; } }; this.oChangeRegistry.addRegistryItem(registryItem); var 
bHasRegisteredChangeHandlers = this.oChangeRegistry.hasRegisteredChangeHandlersForControl(sControlType); assert.strictEqual(bHasRegisteredChangeHandlers, true, "the registry tells that there is a registered change handler for the given control"); }); QUnit.test("can determine if a given control has NO registered change handlers", function (assert) { var sControlType = "sap.ui.fl.DummyControl"; var sSomeOtherControlType = "sap.ui.fl.DummyControlWithNoHandlers"; var registryItem = { getControlType: function () { return sControlType; }, getChangeTypeName: function () { return "myChangeType"; } }; this.oChangeRegistry.addRegistryItem(registryItem); var bHasRegisteredChangeHandlers = this.oChangeRegistry.hasRegisteredChangeHandlersForControl(sSomeOtherControlType); assert.strictEqual(bHasRegisteredChangeHandlers, false, "the registry tells that there is NO a registered change handler for the given control"); }); QUnit.test("can determine if a given control has a change handler for a specific type of changes", function (assert) { var sControlType = "sap.ui.fl.DummyControl"; var sChangeType = "myChangeType"; var registryItem = { getControlType: function () { return sControlType; }, getChangeTypeName: function () { return sChangeType; } }; this.oChangeRegistry.addRegistryItem(registryItem); var bHasRegisteredChangeHandlers = this.oChangeRegistry.hasChangeHandlerForControlAndChange(sControlType, sChangeType); assert.strictEqual(bHasRegisteredChangeHandlers, true, "the registry tells that there is a registered change handler for the given control and change"); }); QUnit.test("can determine if a given control has NOT a change handler for a specific change type if it has no change handlers at all registered for that control", function (assert) { var sControlType = "sap.ui.fl.DummyControl"; var sChangeType = "myChangeType"; var sSomeOtherControlType = "sap.ui.fl.DummyControlWithNoHandlers"; var registryItem = { getControlType: function () { return sControlType; }, getChangeTypeName: function () { return sChangeType; } }; this.oChangeRegistry.addRegistryItem(registryItem); var bHasRegisteredChangeHandlers = this.oChangeRegistry.hasChangeHandlerForControlAndChange(sSomeOtherControlType, sChangeType); assert.strictEqual(bHasRegisteredChangeHandlers, false, "the registry tells that there is NO registered change handler for the given control and change"); }); QUnit.test("can determine if a given control has NOT a change handler for a specific change type if it has some change handlers registered for other change types for that control", function (assert) { var sControlType = "sap.ui.fl.DummyControl"; var sChangeType = "myChangeType"; var sSomeOtherChangeType = "myOtherChangeType"; var registryItem = { getControlType: function () { return sControlType; }, getChangeTypeName: function () { return sChangeType; } }; this.oChangeRegistry.addRegistryItem(registryItem); var bHasRegisteredChangeHandlers = this.oChangeRegistry.hasChangeHandlerForControlAndChange(sControlType, sSomeOtherChangeType); assert.strictEqual(bHasRegisteredChangeHandlers, false, "the registry tells that there is NO registered change handler for the given control and change"); }); QUnit.test("can determine if a given control has NOT a change handler for a specific control if neither the control has registered change handlers nor the change handler is registered anywhere else", function (assert) { var sControlType = "sap.ui.fl.DummyControl"; var sChangeType = "myChangeType"; var sSomeOtherControlType = "sap.ui.fl.DummyControlWithNoHandlers"; 
var sSomeOtherChangeType = "myOtherChangeType"; var registryItem = { getControlType: function () { return sControlType; }, getChangeTypeName: function () { return sChangeType; } }; this.oChangeRegistry.addRegistryItem(registryItem); var bHasRegisteredChangeHandlers = this.oChangeRegistry.hasChangeHandlerForControlAndChange(sSomeOtherControlType, sSomeOtherChangeType); assert.strictEqual(bHasRegisteredChangeHandlers, false, "the registry tells that there is NO registered change handler for the given control and change"); }); QUnit.test("returns the property change handler for a control not having any explicit registered change handlers", function (assert) { var sControlType = "aControlType"; var sPropertyChangeType = "propertyChange"; var mRegistryItem = this.oChangeRegistry._getRegistryItem(sControlType, sPropertyChangeType); assert.equal(mRegistryItem, this.oChangeRegistry._oDefaultActiveChangeHandlers.propertyChange, "the default property change handler was retrieved"); }); QUnit.test("returns the property change handler for a control having other registered change handlers", function (assert) { var sControlType = "aControlType"; var sPropertyChangeType = "propertyChange"; this.oChangeRegistry._registeredItems[sControlType] = { someOtherChange: {} }; var mRegistryItem = this.oChangeRegistry._getRegistryItem(sControlType, sPropertyChangeType); assert.equal(mRegistryItem, this.oChangeRegistry._oDefaultActiveChangeHandlers.propertyChange, "the default property change handler was retrieved"); }); QUnit.test("returns the explicit for a given control type registered change handler for the property changes", function (assert) { var sControlType = "aControlType"; var sPropertyChangeType = "propertyChange"; var oExplicitRegisteredChangeHandlerStub = {}; var oSimpleChangeObject = { changeType: sPropertyChangeType, changeHandler: oExplicitRegisteredChangeHandlerStub }; var oChangeRegistryItem = this.oChangeRegistry._createChangeRegistryItemForSimpleChange(sControlType, oSimpleChangeObject); this.oChangeRegistry._registeredItems[sControlType] = { someOtherChange: {}, propertyChange: oChangeRegistryItem }; var mRegistryItem = this.oChangeRegistry._getRegistryItem(sControlType, sPropertyChangeType); assert.equal(mRegistryItem, oChangeRegistryItem, "the explicit registered change handler item was retrieved"); }); QUnit.test("returns the property binding change handler for a control not having any explicit registered change handlers", function (assert) { var sControlType = "aControlType"; var sPropertyBindingChangeType = "propertyBindingChange"; var mRegistryItem = this.oChangeRegistry._getRegistryItem(sControlType, sPropertyBindingChangeType); assert.equal(mRegistryItem, this.oChangeRegistry._oDefaultActiveChangeHandlers.propertyBindingChange, "the default property binding change handler was retrieved"); }); QUnit.test("returns the property change handler for a control having other registered change handlers", function (assert) { var sControlType = "aControlType"; var sPropertyBindingChangeType = "propertyBindingChange"; this.oChangeRegistry._registeredItems[sControlType] = { someOtherChange: {} }; var mRegistryItem = this.oChangeRegistry._getRegistryItem(sControlType, sPropertyBindingChangeType); assert.equal(mRegistryItem, this.oChangeRegistry._oDefaultActiveChangeHandlers.propertyBindingChange, "the default property binding change handler was retrieved"); }); QUnit.test("returns the explicit for a given control type registered change handler for the property binding changes", function (assert) { var 
sControlType = "aControlType"; var sPropertyBindingChangeType = "propertyBindingChange"; var oExplicitRegisteredChangeHandlerStub = {}; var oSimpleChangeObject = { changeType: sPropertyBindingChangeType, changeHandler: oExplicitRegisteredChangeHandlerStub }; var oChangeRegistryItem = this.oChangeRegistry._createChangeRegistryItemForSimpleChange(sControlType, oSimpleChangeObject); this.oChangeRegistry._registeredItems[sControlType] = { someOtherChange: {}, propertyBindingChange: oChangeRegistryItem }; var mRegistryItem = this.oChangeRegistry._getRegistryItem(sControlType, sPropertyBindingChangeType); assert.equal(mRegistryItem, oChangeRegistryItem, "the explicit registered change handler item was retrieved"); }); QUnit.test("when _getInstanceSpecificChangeRegistryItem is called without flexibility path defined on given control", function (assert) { var oGetChangeHandlerModuleStub = sandbox.stub(JsControlTreeModifier, "getChangeHandlerModulePath").returns(null); var oControl = {}; var oSimpleChangeObject = {}; return this.oChangeRegistry._getInstanceSpecificChangeRegistryItem(oSimpleChangeObject, oControl, JsControlTreeModifier) .then(function(oChangeRegistryItem) { assert.equal(oGetChangeHandlerModuleStub.callCount, 1, "then getChangeHandlerModule function is called"); assert.equal(oChangeRegistryItem, undefined, "then no registry item is returned"); }); }); QUnit.test("when _getInstanceSpecificChangeRegistryItem is called with invalid flexibility path defined on given control", function (assert) { assert.expect(3); var oGetChangeHandlerModuleStub = sandbox.stub(JsControlTreeModifier, "getChangeHandlerModulePath").returns("invalid/path/TestChangeHandlers"); var oControl = {}; var sControlId = "controlId"; var sPropertyBindingChangeType = "propertyBindingChange"; var oExplicitRegisteredChangeHandlerStub = {}; var oSimpleChangeObject = { changeType: sPropertyBindingChangeType, changeHandler: oExplicitRegisteredChangeHandlerStub }; sandbox.stub(JsControlTreeModifier, "getId").returns(sControlId); sandbox.stub(Log, "error").callsFake(function(sErrorMessage) { if (sErrorMessage.indexOf(sControlId) !== -1) { assert.ok(true, "then error was logged"); } }); return this.oChangeRegistry._getInstanceSpecificChangeRegistryItem(oSimpleChangeObject, oControl, JsControlTreeModifier) .then(function(oChangeRegistryItem) { assert.equal(oGetChangeHandlerModuleStub.callCount, 1, "then getChangeHandlerModule function is called"); assert.equal(oChangeRegistryItem, undefined, "then no registry item is returned"); }); }); QUnit.test("when _getInstanceSpecificChangeRegistryItem is called and passed parameter is a valid changeType", function (assert) { var oErrorLoggingStub = sandbox.stub(Log, "error"); sandbox.stub(JsControlTreeModifier, "getChangeHandlerModulePath").returns("sap/ui/fl/test/registry/TestChangeHandlers.flexibility"); sandbox.stub(JsControlTreeModifier, "getControlType").returns("controlType"); var oControl = {}; var sChangeType = "doSomething"; return this.oChangeRegistry._getInstanceSpecificChangeRegistryItem(sChangeType, oControl, JsControlTreeModifier) .then(function(oChangeRegistryItem) { // assert.equal(oGetChangeHandlerModuleStub.callCount, 1, "then getChangeHandlerModule function is called"); assert.equal(oErrorLoggingStub.callCount, 0, "then no error was logged"); assert.ok(oChangeRegistryItem instanceof ChangeRegistryItem, "then registry item is returned"); assert.equal(oChangeRegistryItem.getChangeTypeName(), sChangeType, "then returned registry item has the correct changeType"); }); }); 
QUnit.test("when _getInstanceSpecificChangeRegistryItem is called and passed parameter is a change with a valid changeType", function (assert) { var oErrorLoggingStub = sandbox.stub(Log, "error"); var oGetChangeHandlerModuleStub = sandbox.stub(JsControlTreeModifier, "getChangeHandlerModulePath").returns("sap/ui/fl/test/registry/TestChangeHandlers.flexibility"); sandbox.stub(JsControlTreeModifier, "getControlType").returns("controlType"); var oControl = {}; var sChangeType = "doSomethingElse"; return this.oChangeRegistry._getInstanceSpecificChangeRegistryItem(sChangeType, oControl, JsControlTreeModifier) .then(function(oChangeRegistryItem) { assert.equal(oGetChangeHandlerModuleStub.callCount, 1, "then getChangeHandlerModule function is called"); assert.equal(oErrorLoggingStub.callCount, 0, "then no error was logged"); assert.ok(oChangeRegistryItem instanceof ChangeRegistryItem, "then registry item is returned"); assert.equal(oChangeRegistryItem.getChangeTypeName(), sChangeType, "then returned registry item has the correct changeType"); }); }); QUnit.test("when getChangeHandler is called for a control without instance specific changeHandler", function (assert) { var oControl = {}; var sChangeType = "moveControls"; var sControlType = "VerticalLayout"; var sLayer = Layer.CUSTOMER; var oErrorLoggingStub; var oGetChangeHandlerModuleStub; return this.oChangeRegistry.registerControlsForChanges({ VerticalLayout : { moveControls: "default" } }) .then(function() { oErrorLoggingStub = sandbox.stub(Log, "error"); oGetChangeHandlerModuleStub = sandbox.stub(JsControlTreeModifier, "getChangeHandlerModulePath").returns("sap/ui/fl/test/registry/TestChangeHandlers.flexibility"); sandbox.stub(JsControlTreeModifier, "getControlType").returns(sControlType); return this.oChangeRegistry.getChangeHandler(sChangeType, sControlType, oControl, JsControlTreeModifier, sLayer); }.bind(this)) .then(function(oChangeHandler) { assert.equal(oGetChangeHandlerModuleStub.callCount, 1, "then getChangeHandlerModule function is called"); assert.equal(oErrorLoggingStub.callCount, 0, "then no error was logged"); assert.equal(oChangeHandler, MoveControlsChangeHandler, "then correct changehandler is returned"); }); }); QUnit.test("when getChangeHandler is called for a control with instance specific and default changeHandlers", function (assert) { var oControl = {}; var sChangeType = "doSomething"; var sControlType = "VerticalLayout"; var sLayer = Layer.CUSTOMER; sandbox.stub(JsControlTreeModifier, "getChangeHandlerModulePath").returns("sap/ui/fl/test/registry/TestChangeHandlers.flexibility"); sandbox.stub(JsControlTreeModifier, "getControlType").returns("VerticalLayout"); return this.oChangeRegistry.registerControlsForChanges({ VerticalLayout : { doSomething: "default" } }) .then(function() { return this.oChangeRegistry.getChangeHandler(sChangeType, sControlType, oControl, JsControlTreeModifier, sLayer); }.bind(this)) .then(function(oChangeHandler) { assert.equal(oChangeHandler.dummyId, "testChangeHandler-doSomething", "then instance specific changehandler is returned"); }); }); QUnit.test("when getChangeHandler is called for a control with instance specific changeHandler but with the wrong layer", function (assert) { var oControl = {}; var sChangeType = "doSomething"; var sControlType = "VerticalLayout"; var sLayer = Layer.CUSTOMER; sandbox.stub(JsControlTreeModifier, "getChangeHandlerModulePath").returns("sap/ui/fl/test/registry/TestChangeHandlersUserLayer.flexibility"); sandbox.stub(JsControlTreeModifier, 
"getControlType").returns("VerticalLayout"); return this.oChangeRegistry.getChangeHandler(sChangeType, sControlType, oControl, JsControlTreeModifier, sLayer) .catch(function(oError) { assert.equal(oError.message, "Change type " + sChangeType + " not enabled for layer " + sLayer, "then an error is thrown"); }); }); QUnit.test("when getChangeHandler is called for previously existing changetype and existing instance specific changehandler for another changetype", function (assert) { var oControl = {}; var sChangeType = "moveControls"; var sControlType = "VerticalLayout"; var sLayer = Layer.CUSTOMER; sandbox.stub(JsControlTreeModifier, "getChangeHandlerModulePath").returns("sap/ui/fl/test/registry/TestChangeHandlers.flexibility"); sandbox.stub(JsControlTreeModifier, "getControlType").returns("VerticalLayout"); return this.oChangeRegistry.registerControlsForChanges({ VerticalLayout : { moveControls: "default" } }) .then(function() { return this.oChangeRegistry.getChangeHandler(sChangeType, sControlType, oControl, JsControlTreeModifier, sLayer); }.bind(this)) .then(function(oChangeHandler) { assert.equal(oChangeHandler, MoveControlsChangeHandler, "then correct default changehandler is returned"); }); }); QUnit.test("when getChangeHandler is called for without a handler registered for the control", function (assert) { var oControl = {}; var sChangeType = "moveControls"; var sControlType = "VerticalLayout"; var sLayer = Layer.CUSTOMER; sandbox.stub(JsControlTreeModifier, "getControlType").returns("VerticalLayout"); return this.oChangeRegistry.getChangeHandler(sChangeType, sControlType, oControl, JsControlTreeModifier, sLayer) .then(function() { assert.ok(false, "should not resolve"); }) .catch(function(oError) { assert.ok(oError, "the function rejects with an error"); assert.ok(oError.message.indexOf("No Change handler registered for the Control and Change type") > -1, "the error contains the correct message"); }); }); QUnit.test("when getChangeHandler is called without control type specified", function (assert) { var oControl = {}; var sChangeType = "addXML"; var sControlType; var sLayer = Layer.VENDOR; return this.oChangeRegistry.getChangeHandler(sChangeType, sControlType, oControl, JsControlTreeModifier, sLayer) .then(function(oChangeHandler) { assert.strictEqual(oChangeHandler, AddXMLChangeHandler, "then the function should return the change handler specified just by change type"); }) .catch(function(oError) { assert.notOk(oError, "then the function should not rejects with an error"); }); }); QUnit.test("when getChangeHandler is called with invalid layer specified", function (assert) { var sControlType = "aControlType"; var sPropertyBindingChangeType = "propertyBindingChange"; var oExplicitRegisteredChangeHandlerStub = {}; var sLayer = "INVALIDLAYER"; var oSimpleChangeObject = { changeType: sPropertyBindingChangeType, changeHandler: oExplicitRegisteredChangeHandlerStub }; this.oChangeRegistry.registerControlForSimpleChange(sControlType, oSimpleChangeObject); return this.oChangeRegistry.getChangeHandler(sPropertyBindingChangeType, sControlType, undefined, JsControlTreeModifier, sLayer) .catch(function (oError) { assert.strictEqual(oError.message, "Change type " + sPropertyBindingChangeType + " not enabled for layer " + sLayer, "then an error is thrown"); }); }); }); QUnit.done(function () { jQuery("#qunit-fixture").hide(); }); });
{ "pile_set_name": "Github" }
function data = loadjson(fname,varargin) % % data=loadjson(fname,opt) % or % data=loadjson(fname,'param1',value1,'param2',value2,...) % % parse a JSON (JavaScript Object Notation) file or string % % authors:Qianqian Fang (fangq<at> nmr.mgh.harvard.edu) % created on 2011/09/09, including previous works from % % Nedialko Krouchev: http://www.mathworks.com/matlabcentral/fileexchange/25713 % created on 2009/11/02 % François Glineur: http://www.mathworks.com/matlabcentral/fileexchange/23393 % created on 2009/03/22 % Joel Feenstra: % http://www.mathworks.com/matlabcentral/fileexchange/20565 % created on 2008/07/03 % % $Id: loadjson.m 460 2015-01-03 00:30:45Z fangq $ % % input: % fname: input file name, if fname contains "{}" or "[]", fname % will be interpreted as a JSON string % opt: a struct to store parsing options, opt can be replaced by % a list of ('param',value) pairs - the param string is equivallent % to a field in opt. opt can have the following % fields (first in [.|.] is the default) % % opt.SimplifyCell [0|1]: if set to 1, loadjson will call cell2mat % for each element of the JSON data, and group % arrays based on the cell2mat rules. % opt.FastArrayParser [1|0 or integer]: if set to 1, use a % speed-optimized array parser when loading an % array object. The fast array parser may % collapse block arrays into a single large % array similar to rules defined in cell2mat; 0 to % use a legacy parser; if set to a larger-than-1 % value, this option will specify the minimum % dimension to enable the fast array parser. For % example, if the input is a 3D array, setting % FastArrayParser to 1 will return a 3D array; % setting to 2 will return a cell array of 2D % arrays; setting to 3 will return to a 2D cell % array of 1D vectors; setting to 4 will return a % 3D cell array. % opt.ShowProgress [0|1]: if set to 1, loadjson displays a progress bar. % % output: % dat: a cell array, where {...} blocks are converted into cell arrays, % and [...] 
are converted to arrays % % examples: % dat=loadjson('{"obj":{"string":"value","array":[1,2,3]}}') % dat=loadjson(['examples' filesep 'example1.json']) % dat=loadjson(['examples' filesep 'example1.json'],'SimplifyCell',1) % % license: % BSD, see LICENSE_BSD.txt files for details % % -- this function is part of JSONLab toolbox (http://iso2mesh.sf.net/cgi-bin/index.cgi?jsonlab) % global pos inStr len esc index_esc len_esc isoct arraytoken if(regexp(fname,'[\{\}\]\[]','once')) string=fname; elseif(exist(fname,'file')) fid = fopen(fname,'rb'); string = fread(fid,inf,'uint8=>char')'; fclose(fid); else error('input file does not exist'); end pos = 1; len = length(string); inStr = string; isoct=exist('OCTAVE_VERSION','builtin'); arraytoken=find(inStr=='[' | inStr==']' | inStr=='"'); jstr=regexprep(inStr,'\\\\',' '); escquote=regexp(jstr,'\\"'); arraytoken=sort([arraytoken escquote]); % String delimiters and escape chars identified to improve speed: esc = find(inStr=='"' | inStr=='\' ); % comparable to: regexp(inStr, '["\\]'); index_esc = 1; len_esc = length(esc); opt=varargin2struct(varargin{:}); if(jsonopt('ShowProgress',0,opt)==1) opt.progressbar_=waitbar(0,'loading ...'); end jsoncount=1; while pos <= len switch(next_char) case '{' data{jsoncount} = parse_object(opt); case '[' data{jsoncount} = parse_array(opt); otherwise error_pos('Outer level structure must be an object or an array'); end jsoncount=jsoncount+1; end % while jsoncount=length(data); if(jsoncount==1 && iscell(data)) data=data{1}; end if(~isempty(data)) if(isstruct(data)) % data can be a struct array data=jstruct2array(data); elseif(iscell(data)) data=jcell2array(data); end end if(isfield(opt,'progressbar_')) close(opt.progressbar_); end %% function newdata=jcell2array(data) len=length(data); newdata=data; for i=1:len if(isstruct(data{i})) newdata{i}=jstruct2array(data{i}); elseif(iscell(data{i})) newdata{i}=jcell2array(data{i}); end end %%------------------------------------------------------------------------- function newdata=jstruct2array(data) fn=fieldnames(data); newdata=data; len=length(data); for i=1:length(fn) % depth-first for j=1:len if(isstruct(getfield(data(j),fn{i}))) newdata(j)=setfield(newdata(j),fn{i},jstruct2array(getfield(data(j),fn{i}))); end end end if(~isempty(strmatch('x0x5F_ArrayType_',fn)) && ~isempty(strmatch('x0x5F_ArrayData_',fn))) newdata=cell(len,1); for j=1:len ndata=cast(data(j).x0x5F_ArrayData_,data(j).x0x5F_ArrayType_); iscpx=0; if(~isempty(strmatch('x0x5F_ArrayIsComplex_',fn))) if(data(j).x0x5F_ArrayIsComplex_) iscpx=1; end end if(~isempty(strmatch('x0x5F_ArrayIsSparse_',fn))) if(data(j).x0x5F_ArrayIsSparse_) if(~isempty(strmatch('x0x5F_ArraySize_',fn))) dim=data(j).x0x5F_ArraySize_; if(iscpx && size(ndata,2)==4-any(dim==1)) ndata(:,end-1)=complex(ndata(:,end-1),ndata(:,end)); end if isempty(ndata) % All-zeros sparse ndata=sparse(dim(1),prod(dim(2:end))); elseif dim(1)==1 % Sparse row vector ndata=sparse(1,ndata(:,1),ndata(:,2),dim(1),prod(dim(2:end))); elseif dim(2)==1 % Sparse column vector ndata=sparse(ndata(:,1),1,ndata(:,2),dim(1),prod(dim(2:end))); else % Generic sparse array. 
ndata=sparse(ndata(:,1),ndata(:,2),ndata(:,3),dim(1),prod(dim(2:end))); end else if(iscpx && size(ndata,2)==4) ndata(:,3)=complex(ndata(:,3),ndata(:,4)); end ndata=sparse(ndata(:,1),ndata(:,2),ndata(:,3)); end end elseif(~isempty(strmatch('x0x5F_ArraySize_',fn))) if(iscpx && size(ndata,2)==2) ndata=complex(ndata(:,1),ndata(:,2)); end ndata=reshape(ndata(:),data(j).x0x5F_ArraySize_); end newdata{j}=ndata; end if(len==1) newdata=newdata{1}; end end %%------------------------------------------------------------------------- function object = parse_object(varargin) parse_char('{'); object = []; if next_char ~= '}' while 1 str = parseStr(varargin{:}); if isempty(str) error_pos('Name of value at position %d cannot be empty'); end parse_char(':'); val = parse_value(varargin{:}); eval( sprintf( 'object.%s = val;', valid_field(str) ) ); if next_char == '}' break; end parse_char(','); end end parse_char('}'); %%------------------------------------------------------------------------- function object = parse_array(varargin) % JSON array is written in row-major order global pos inStr isoct parse_char('['); object = cell(0, 1); dim2=[]; arraydepth=jsonopt('JSONLAB_ArrayDepth_',1,varargin{:}); pbar=jsonopt('progressbar_',-1,varargin{:}); if next_char ~= ']' if(jsonopt('FastArrayParser',1,varargin{:})>=1 && arraydepth>=jsonopt('FastArrayParser',1,varargin{:})) [endpos, e1l, e1r, maxlevel]=matching_bracket(inStr,pos); arraystr=['[' inStr(pos:endpos)]; arraystr=regexprep(arraystr,'"_NaN_"','NaN'); arraystr=regexprep(arraystr,'"([-+]*)_Inf_"','$1Inf'); arraystr(arraystr==sprintf('\n'))=[]; arraystr(arraystr==sprintf('\r'))=[]; %arraystr=regexprep(arraystr,'\s*,',','); % this is slow,sometimes needed if(~isempty(e1l) && ~isempty(e1r)) % the array is in 2D or higher D astr=inStr((e1l+1):(e1r-1)); astr=regexprep(astr,'"_NaN_"','NaN'); astr=regexprep(astr,'"([-+]*)_Inf_"','$1Inf'); astr(astr==sprintf('\n'))=[]; astr(astr==sprintf('\r'))=[]; astr(astr==' ')=''; if(isempty(find(astr=='[', 1))) % array is 2D dim2=length(sscanf(astr,'%f,',[1 inf])); end else % array is 1D astr=arraystr(2:end-1); astr(astr==' ')=''; [obj, count, errmsg, nextidx]=sscanf(astr,'%f,',[1,inf]); if(nextidx>=length(astr)-1) object=obj; pos=endpos; parse_char(']'); return; end end if(~isempty(dim2)) astr=arraystr; astr(astr=='[')=''; astr(astr==']')=''; astr(astr==' ')=''; [obj, count, errmsg, nextidx]=sscanf(astr,'%f,',inf); if(nextidx>=length(astr)-1) object=reshape(obj,dim2,numel(obj)/dim2)'; pos=endpos; parse_char(']'); if(pbar>0) waitbar(pos/length(inStr),pbar,'loading ...'); end return; end end arraystr=regexprep(arraystr,'\]\s*,','];'); else arraystr='['; end try if(isoct && regexp(arraystr,'"','once')) error('Octave eval can produce empty cells for JSON-like input'); end object=eval(arraystr); pos=endpos; catch while 1 newopt=varargin2struct(varargin{:},'JSONLAB_ArrayDepth_',arraydepth+1); val = parse_value(newopt); object{end+1} = val; if next_char == ']' break; end parse_char(','); end end end if(jsonopt('SimplifyCell',0,varargin{:})==1) try oldobj=object; object=cell2mat(object')'; if(iscell(oldobj) && isstruct(object) && numel(object)>1 && jsonopt('SimplifyCellArray',1,varargin{:})==0) object=oldobj; elseif(size(object,1)>1 && ndims(object)==2) object=object'; end catch end end parse_char(']'); if(pbar>0) waitbar(pos/length(inStr),pbar,'loading ...'); end %%------------------------------------------------------------------------- function parse_char(c) global pos inStr len skip_whitespace; if pos > len || inStr(pos) ~= c 
error_pos(sprintf('Expected %c at position %%d', c)); else pos = pos + 1; skip_whitespace; end %%------------------------------------------------------------------------- function c = next_char global pos inStr len skip_whitespace; if pos > len c = []; else c = inStr(pos); end %%------------------------------------------------------------------------- function skip_whitespace global pos inStr len while pos <= len && isspace(inStr(pos)) pos = pos + 1; end %%------------------------------------------------------------------------- function str = parseStr(varargin) global pos inStr len esc index_esc len_esc % len, ns = length(inStr), keyboard if inStr(pos) ~= '"' error_pos('String starting with " expected at position %d'); else pos = pos + 1; end str = ''; while pos <= len while index_esc <= len_esc && esc(index_esc) < pos index_esc = index_esc + 1; end if index_esc > len_esc str = [str inStr(pos:len)]; pos = len + 1; break; else str = [str inStr(pos:esc(index_esc)-1)]; pos = esc(index_esc); end nstr = length(str); switch inStr(pos) case '"' pos = pos + 1; if(~isempty(str)) if(strcmp(str,'_Inf_')) str=Inf; elseif(strcmp(str,'-_Inf_')) str=-Inf; elseif(strcmp(str,'_NaN_')) str=NaN; end end return; case '\' if pos+1 > len error_pos('End of file reached right after escape character'); end pos = pos + 1; switch inStr(pos) case {'"' '\' '/'} str(nstr+1) = inStr(pos); pos = pos + 1; case {'b' 'f' 'n' 'r' 't'} str(nstr+1) = sprintf(['\' inStr(pos)]); pos = pos + 1; case 'u' if pos+4 > len error_pos('End of file reached in escaped unicode character'); end str(nstr+(1:6)) = inStr(pos-1:pos+4); pos = pos + 5; end otherwise % should never happen str(nstr+1) = inStr(pos), keyboard pos = pos + 1; end end error_pos('End of file while expecting end of inStr'); %%------------------------------------------------------------------------- function num = parse_number(varargin) global pos inStr len isoct currstr=inStr(pos:end); numstr=0; if(isoct~=0) numstr=regexp(currstr,'^\s*-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+\-]?\d+)?','end'); [num, one] = sscanf(currstr, '%f', 1); delta=numstr+1; else [num, one, err, delta] = sscanf(currstr, '%f', 1); if ~isempty(err) error_pos('Error reading number at position %d'); end end pos = pos + delta-1; %%------------------------------------------------------------------------- function val = parse_value(varargin) global pos inStr len true = 1; false = 0; pbar=jsonopt('progressbar_',-1,varargin{:}); if(pbar>0) waitbar(pos/len,pbar,'loading ...'); end switch(inStr(pos)) case '"' val = parseStr(varargin{:}); return; case '[' val = parse_array(varargin{:}); return; case '{' val = parse_object(varargin{:}); if isstruct(val) if(~isempty(strmatch('x0x5F_ArrayType_',fieldnames(val), 'exact'))) val=jstruct2array(val); end elseif isempty(val) val = struct; end return; case {'-','0','1','2','3','4','5','6','7','8','9'} val = parse_number(varargin{:}); return; case 't' if pos+3 <= len && strcmpi(inStr(pos:pos+3), 'true') val = true; pos = pos + 4; return; end case 'f' if pos+4 <= len && strcmpi(inStr(pos:pos+4), 'false') val = false; pos = pos + 5; return; end case 'n' if pos+3 <= len && strcmpi(inStr(pos:pos+3), 'null') val = []; pos = pos + 4; return; end end error_pos('Value expected at position %d'); %%------------------------------------------------------------------------- function error_pos(msg) global pos inStr len poShow = max(min([pos-15 pos-1 pos pos+20],len),1); if poShow(3) == poShow(2) poShow(3:4) = poShow(2)+[0 -1]; % display nothing after end msg = [sprintf(msg, pos) ': ' ... 
inStr(poShow(1):poShow(2)) '<error>' inStr(poShow(3):poShow(4)) ]; error( ['JSONparser:invalidFormat: ' msg] ); %%------------------------------------------------------------------------- function str = valid_field(str) global isoct % From MATLAB doc: field names must begin with a letter, which may be % followed by any combination of letters, digits, and underscores. % Invalid characters will be converted to underscores, and the prefix % "x0x[Hex code]_" will be added if the first character is not a letter. pos=regexp(str,'^[^A-Za-z]','once'); if(~isempty(pos)) if(~isoct) str=regexprep(str,'^([^A-Za-z])','x0x${sprintf(''%X'',unicode2native($1))}_','once'); else str=sprintf('x0x%X_%s',char(str(1)),str(2:end)); end end if(isempty(regexp(str,'[^0-9A-Za-z_]', 'once' ))) return; end if(~isoct) str=regexprep(str,'([^0-9A-Za-z_])','_0x${sprintf(''%X'',unicode2native($1))}_'); else pos=regexp(str,'[^0-9A-Za-z_]'); if(isempty(pos)) return; end str0=str; pos0=[0 pos(:)' length(str)]; str=''; for i=1:length(pos) str=[str str0(pos0(i)+1:pos(i)-1) sprintf('_0x%X_',str0(pos(i)))]; end if(pos(end)~=length(str)) str=[str str0(pos0(end-1)+1:pos0(end))]; end end %str(~isletter(str) & ~('0' <= str & str <= '9')) = '_'; %%------------------------------------------------------------------------- function endpos = matching_quote(str,pos) len=length(str); while(pos<len) if(str(pos)=='"') if(~(pos>1 && str(pos-1)=='\')) endpos=pos; return; end end pos=pos+1; end error('unmatched quotation mark'); %%------------------------------------------------------------------------- function [endpos, e1l, e1r, maxlevel] = matching_bracket(str,pos) global arraytoken level=1; maxlevel=level; endpos=0; bpos=arraytoken(arraytoken>=pos); tokens=str(bpos); len=length(tokens); pos=1; e1l=[]; e1r=[]; while(pos<=len) c=tokens(pos); if(c==']') level=level-1; if(isempty(e1r)) e1r=bpos(pos); end if(level==0) endpos=bpos(pos); return end end if(c=='[') if(isempty(e1l)) e1l=bpos(pos); end level=level+1; maxlevel=max(maxlevel,level); end if(c=='"') pos=matching_quote(tokens,pos+1); end pos=pos+1; end if(endpos==0) error('unmatched "]"'); end
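A note on the sentinel handling visible in parseStr and parse_array above: because JSON itself has no literals for IEEE specials, the parser rewrites the strings "_NaN_", "_Inf_" and "-_Inf_" back into NaN, Inf and -Inf before evaluating an array. As a purely illustrative, hypothetical input, the following JSON would decode its field a to the numeric vector [1 NaN Inf]:

{"a": [1, "_NaN_", "_Inf_"]}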
{ "pile_set_name": "Github" }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.ofbiz.entity.util.EntityUtil
import org.apache.ofbiz.entity.condition.EntityCondition
import org.apache.ofbiz.entity.condition.EntityOperator

shipmentId = parameters.shipmentId
items = []

shipment = from("Shipment").where("shipmentId", shipmentId).queryOne()
partyId = shipment.partyIdTo

shipmentItems = shipment.getRelated("ShipmentItem", null, null, false)
shipmentItems.each { shipmentItem ->
    productId = shipmentItem.productId
    internalName = shipmentItem.getRelated("Product", null, null, false).internalName
    EntityCondition cond = EntityCondition.makeCondition([EntityCondition.makeCondition("returnId", shipment.primaryReturnId),
                                                          EntityCondition.makeCondition("productId", productId)], EntityOperator.AND)
    returnItem = from("ReturnItem").where("returnId", shipment.primaryReturnId, "productId", productId).cache(true).queryFirst()
    returnQuantity = Double.valueOf(returnItem.returnQuantity)
    shipmentItemQty = Double.valueOf(shipmentItem.quantity)
    itemIssuances = shipmentItem.getRelated("ItemIssuance", [shipmentId : shipmentId, shipmentItemSeqId : shipmentItem.shipmentItemSeqId], ["inventoryItemId"], false)
    totalQtyIssued = 0
    issuedItems = []
    itemIssuances.each { itemIssuance ->
        totalQtyIssued = totalQtyIssued + Double.valueOf(itemIssuance.quantity)
        issuedItems.add([inventoryItemId : itemIssuance.inventoryItemId,
                         quantity : itemIssuance.quantity])
    }
    qtyStillNeedToBeIssued = returnQuantity - totalQtyIssued
    items.add([shipmentId : shipmentId,
               shipmentItemSeqId : shipmentItem.shipmentItemSeqId,
               returnId : returnItem.returnId,
               returnItemSeqId : returnItem.returnItemSeqId,
               orderId : returnItem.orderId,
               partyId : partyId,
               productId : productId,
               internalName : internalName,
               shipmentItemQty : shipmentItemQty,
               returnQuantity : returnQuantity,
               totalQtyIssued : totalQtyIssued,
               issuedItems : issuedItems,
               qtyStillNeedToBeIssued : qtyStillNeedToBeIssued,
              ])
}
context.shipmentId = shipmentId
context.items = items
{ "pile_set_name": "Github" }
include "sub/include_test2.fbs"; include "sub/include_test2.fbs"; // should be skipped include "include_test1.fbs"; // should be skipped table TableA { b:MyGame.OtherNameSpace.TableB; }
{ "pile_set_name": "Github" }
/**
 * MK4duo Firmware for 3D Printer, Laser and CNC
 *
 * Based on Marlin, Sprinter and grbl
 * Copyright (c) 2011 Camiel Gubbels / Erik van der Zalm
 * Copyright (c) 2020 Alberto Cotronei @MagoKimbra
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */

/**
 * mcode
 *
 * Copyright (c) 2020 Alberto Cotronei @MagoKimbra
 */

#define CODE_M204

/**
 * M204: Set planner.accelerations in units/sec^2 (M204 P1200 T0 R3000 V3000)
 *
 *    P    = Printing moves
 *    T* R = Retract only (no X, Y, Z) moves
 *    V    = Travel (non printing) moves
 *
 *  Also sets minimum segment time in ms (B20000) to prevent buffer under-runs and M20 minimum mechanics.feedrate_mm_s
 */
inline void gcode_M204() {

  if (commands.get_target_tool(204)) return;

  #if DISABLED(DISABLE_M503)
    // No arguments? Show M204 report.
    if (!parser.seen_any()) {
      mechanics.print_M204();
      return;
    }
  #endif

  if (parser.seen('S')) // Kept for legacy compatibility. Should NOT BE USED for new developments.
    mechanics.data.travel_acceleration = mechanics.data.acceleration = parser.value_linear_units();

  if (parser.seen('P'))
    mechanics.data.acceleration = parser.value_linear_units();

  if (parser.seen('R'))
    extruders[toolManager.extruder.target]->data.retract_acceleration = parser.value_linear_units();

  if (parser.seen('V'))
    mechanics.data.travel_acceleration = parser.value_linear_units();

}
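As a quick, illustrative usage of the handler above (the values are arbitrary and simply follow the pattern given in its own doc comment), a host would send a G-code line such as:

M204 P1200 R3000 V3000   ; printing accel 1200, retract accel 3000, travel accel 3000, all in units/sec^2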
{ "pile_set_name": "Github" }
<?php
/**
 * This file is part of phpDocumentor.
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 *
 * @copyright 2010-2015 Mike van Riel<[email protected]>
 * @license   http://www.opensource.org/licenses/mit-license.php MIT
 * @link      http://phpdoc.org
 */

namespace phpDocumentor\Reflection;

/**
 * Interface for files processed by the ProjectFactory
 */
interface File
{
    /**
     * Returns the content of the file as a string.
     *
     * @return string
     */
    public function getContents();

    /**
     * Returns md5 hash of the file.
     *
     * @return string
     */
    public function md5();

    /**
     * Returns a relative path to the file.
     *
     * @return string
     */
    public function path();
}
{ "pile_set_name": "Github" }
---
name : dummy_pack_7_name
description : dummy pack
version : 0.1.0
author : st2-dev
email : [email protected]
{ "pile_set_name": "Github" }
/* * Copyright (C) 2013 Boris BREZILLON <[email protected]> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * */ #include <linux/clk-provider.h> #include <linux/clkdev.h> #include <linux/clk/at91_pmc.h> #include <linux/delay.h> #include <linux/of.h> #include <linux/mfd/syscon.h> #include <linux/regmap.h> #include "pmc.h" #define SLOW_CLOCK_FREQ 32768 #define MAINF_DIV 16 #define MAINFRDY_TIMEOUT (((MAINF_DIV + 1) * USEC_PER_SEC) / \ SLOW_CLOCK_FREQ) #define MAINF_LOOP_MIN_WAIT (USEC_PER_SEC / SLOW_CLOCK_FREQ) #define MAINF_LOOP_MAX_WAIT MAINFRDY_TIMEOUT #define MOR_KEY_MASK (0xff << 16) struct clk_main_osc { struct clk_hw hw; struct regmap *regmap; }; #define to_clk_main_osc(hw) container_of(hw, struct clk_main_osc, hw) struct clk_main_rc_osc { struct clk_hw hw; struct regmap *regmap; unsigned long frequency; unsigned long accuracy; }; #define to_clk_main_rc_osc(hw) container_of(hw, struct clk_main_rc_osc, hw) struct clk_rm9200_main { struct clk_hw hw; struct regmap *regmap; }; #define to_clk_rm9200_main(hw) container_of(hw, struct clk_rm9200_main, hw) struct clk_sam9x5_main { struct clk_hw hw; struct regmap *regmap; u8 parent; }; #define to_clk_sam9x5_main(hw) container_of(hw, struct clk_sam9x5_main, hw) static inline bool clk_main_osc_ready(struct regmap *regmap) { unsigned int status; regmap_read(regmap, AT91_PMC_SR, &status); return status & AT91_PMC_MOSCS; } static int clk_main_osc_prepare(struct clk_hw *hw) { struct clk_main_osc *osc = to_clk_main_osc(hw); struct regmap *regmap = osc->regmap; u32 tmp; regmap_read(regmap, AT91_CKGR_MOR, &tmp); tmp &= ~MOR_KEY_MASK; if (tmp & AT91_PMC_OSCBYPASS) return 0; if (!(tmp & AT91_PMC_MOSCEN)) { tmp |= AT91_PMC_MOSCEN | AT91_PMC_KEY; regmap_write(regmap, AT91_CKGR_MOR, tmp); } while (!clk_main_osc_ready(regmap)) cpu_relax(); return 0; } static void clk_main_osc_unprepare(struct clk_hw *hw) { struct clk_main_osc *osc = to_clk_main_osc(hw); struct regmap *regmap = osc->regmap; u32 tmp; regmap_read(regmap, AT91_CKGR_MOR, &tmp); if (tmp & AT91_PMC_OSCBYPASS) return; if (!(tmp & AT91_PMC_MOSCEN)) return; tmp &= ~(AT91_PMC_KEY | AT91_PMC_MOSCEN); regmap_write(regmap, AT91_CKGR_MOR, tmp | AT91_PMC_KEY); } static int clk_main_osc_is_prepared(struct clk_hw *hw) { struct clk_main_osc *osc = to_clk_main_osc(hw); struct regmap *regmap = osc->regmap; u32 tmp, status; regmap_read(regmap, AT91_CKGR_MOR, &tmp); if (tmp & AT91_PMC_OSCBYPASS) return 1; regmap_read(regmap, AT91_PMC_SR, &status); return (status & AT91_PMC_MOSCS) && (tmp & AT91_PMC_MOSCEN); } static const struct clk_ops main_osc_ops = { .prepare = clk_main_osc_prepare, .unprepare = clk_main_osc_unprepare, .is_prepared = clk_main_osc_is_prepared, }; static struct clk_hw * __init at91_clk_register_main_osc(struct regmap *regmap, const char *name, const char *parent_name, bool bypass) { struct clk_main_osc *osc; struct clk_init_data init; struct clk_hw *hw; int ret; if (!name || !parent_name) return ERR_PTR(-EINVAL); osc = kzalloc(sizeof(*osc), GFP_KERNEL); if (!osc) return ERR_PTR(-ENOMEM); init.name = name; init.ops = &main_osc_ops; init.parent_names = &parent_name; init.num_parents = 1; init.flags = CLK_IGNORE_UNUSED; osc->hw.init = &init; osc->regmap = regmap; if (bypass) regmap_update_bits(regmap, AT91_CKGR_MOR, MOR_KEY_MASK | AT91_PMC_MOSCEN, AT91_PMC_OSCBYPASS | AT91_PMC_KEY); hw = &osc->hw; ret 
= clk_hw_register(NULL, &osc->hw); if (ret) { kfree(osc); hw = ERR_PTR(ret); } return hw; } static void __init of_at91rm9200_clk_main_osc_setup(struct device_node *np) { struct clk_hw *hw; const char *name = np->name; const char *parent_name; struct regmap *regmap; bool bypass; of_property_read_string(np, "clock-output-names", &name); bypass = of_property_read_bool(np, "atmel,osc-bypass"); parent_name = of_clk_get_parent_name(np, 0); regmap = syscon_node_to_regmap(of_get_parent(np)); if (IS_ERR(regmap)) return; hw = at91_clk_register_main_osc(regmap, name, parent_name, bypass); if (IS_ERR(hw)) return; of_clk_add_hw_provider(np, of_clk_hw_simple_get, hw); } CLK_OF_DECLARE(at91rm9200_clk_main_osc, "atmel,at91rm9200-clk-main-osc", of_at91rm9200_clk_main_osc_setup); static bool clk_main_rc_osc_ready(struct regmap *regmap) { unsigned int status; regmap_read(regmap, AT91_PMC_SR, &status); return status & AT91_PMC_MOSCRCS; } static int clk_main_rc_osc_prepare(struct clk_hw *hw) { struct clk_main_rc_osc *osc = to_clk_main_rc_osc(hw); struct regmap *regmap = osc->regmap; unsigned int mor; regmap_read(regmap, AT91_CKGR_MOR, &mor); if (!(mor & AT91_PMC_MOSCRCEN)) regmap_update_bits(regmap, AT91_CKGR_MOR, MOR_KEY_MASK | AT91_PMC_MOSCRCEN, AT91_PMC_MOSCRCEN | AT91_PMC_KEY); while (!clk_main_rc_osc_ready(regmap)) cpu_relax(); return 0; } static void clk_main_rc_osc_unprepare(struct clk_hw *hw) { struct clk_main_rc_osc *osc = to_clk_main_rc_osc(hw); struct regmap *regmap = osc->regmap; unsigned int mor; regmap_read(regmap, AT91_CKGR_MOR, &mor); if (!(mor & AT91_PMC_MOSCRCEN)) return; regmap_update_bits(regmap, AT91_CKGR_MOR, MOR_KEY_MASK | AT91_PMC_MOSCRCEN, AT91_PMC_KEY); } static int clk_main_rc_osc_is_prepared(struct clk_hw *hw) { struct clk_main_rc_osc *osc = to_clk_main_rc_osc(hw); struct regmap *regmap = osc->regmap; unsigned int mor, status; regmap_read(regmap, AT91_CKGR_MOR, &mor); regmap_read(regmap, AT91_PMC_SR, &status); return (mor & AT91_PMC_MOSCRCEN) && (status & AT91_PMC_MOSCRCS); } static unsigned long clk_main_rc_osc_recalc_rate(struct clk_hw *hw, unsigned long parent_rate) { struct clk_main_rc_osc *osc = to_clk_main_rc_osc(hw); return osc->frequency; } static unsigned long clk_main_rc_osc_recalc_accuracy(struct clk_hw *hw, unsigned long parent_acc) { struct clk_main_rc_osc *osc = to_clk_main_rc_osc(hw); return osc->accuracy; } static const struct clk_ops main_rc_osc_ops = { .prepare = clk_main_rc_osc_prepare, .unprepare = clk_main_rc_osc_unprepare, .is_prepared = clk_main_rc_osc_is_prepared, .recalc_rate = clk_main_rc_osc_recalc_rate, .recalc_accuracy = clk_main_rc_osc_recalc_accuracy, }; static struct clk_hw * __init at91_clk_register_main_rc_osc(struct regmap *regmap, const char *name, u32 frequency, u32 accuracy) { struct clk_main_rc_osc *osc; struct clk_init_data init; struct clk_hw *hw; int ret; if (!name || !frequency) return ERR_PTR(-EINVAL); osc = kzalloc(sizeof(*osc), GFP_KERNEL); if (!osc) return ERR_PTR(-ENOMEM); init.name = name; init.ops = &main_rc_osc_ops; init.parent_names = NULL; init.num_parents = 0; init.flags = CLK_IGNORE_UNUSED; osc->hw.init = &init; osc->regmap = regmap; osc->frequency = frequency; osc->accuracy = accuracy; hw = &osc->hw; ret = clk_hw_register(NULL, hw); if (ret) { kfree(osc); hw = ERR_PTR(ret); } return hw; } static void __init of_at91sam9x5_clk_main_rc_osc_setup(struct device_node *np) { struct clk_hw *hw; u32 frequency = 0; u32 accuracy = 0; const char *name = np->name; struct regmap *regmap; of_property_read_string(np, "clock-output-names", 
&name); of_property_read_u32(np, "clock-frequency", &frequency); of_property_read_u32(np, "clock-accuracy", &accuracy); regmap = syscon_node_to_regmap(of_get_parent(np)); if (IS_ERR(regmap)) return; hw = at91_clk_register_main_rc_osc(regmap, name, frequency, accuracy); if (IS_ERR(hw)) return; of_clk_add_hw_provider(np, of_clk_hw_simple_get, hw); } CLK_OF_DECLARE(at91sam9x5_clk_main_rc_osc, "atmel,at91sam9x5-clk-main-rc-osc", of_at91sam9x5_clk_main_rc_osc_setup); static int clk_main_probe_frequency(struct regmap *regmap) { unsigned long prep_time, timeout; unsigned int mcfr; timeout = jiffies + usecs_to_jiffies(MAINFRDY_TIMEOUT); do { prep_time = jiffies; regmap_read(regmap, AT91_CKGR_MCFR, &mcfr); if (mcfr & AT91_PMC_MAINRDY) return 0; usleep_range(MAINF_LOOP_MIN_WAIT, MAINF_LOOP_MAX_WAIT); } while (time_before(prep_time, timeout)); return -ETIMEDOUT; } static unsigned long clk_main_recalc_rate(struct regmap *regmap, unsigned long parent_rate) { unsigned int mcfr; if (parent_rate) return parent_rate; pr_warn("Main crystal frequency not set, using approximate value\n"); regmap_read(regmap, AT91_CKGR_MCFR, &mcfr); if (!(mcfr & AT91_PMC_MAINRDY)) return 0; return ((mcfr & AT91_PMC_MAINF) * SLOW_CLOCK_FREQ) / MAINF_DIV; } static int clk_rm9200_main_prepare(struct clk_hw *hw) { struct clk_rm9200_main *clkmain = to_clk_rm9200_main(hw); return clk_main_probe_frequency(clkmain->regmap); } static int clk_rm9200_main_is_prepared(struct clk_hw *hw) { struct clk_rm9200_main *clkmain = to_clk_rm9200_main(hw); unsigned int status; regmap_read(clkmain->regmap, AT91_CKGR_MCFR, &status); return status & AT91_PMC_MAINRDY ? 1 : 0; } static unsigned long clk_rm9200_main_recalc_rate(struct clk_hw *hw, unsigned long parent_rate) { struct clk_rm9200_main *clkmain = to_clk_rm9200_main(hw); return clk_main_recalc_rate(clkmain->regmap, parent_rate); } static const struct clk_ops rm9200_main_ops = { .prepare = clk_rm9200_main_prepare, .is_prepared = clk_rm9200_main_is_prepared, .recalc_rate = clk_rm9200_main_recalc_rate, }; static struct clk_hw * __init at91_clk_register_rm9200_main(struct regmap *regmap, const char *name, const char *parent_name) { struct clk_rm9200_main *clkmain; struct clk_init_data init; struct clk_hw *hw; int ret; if (!name) return ERR_PTR(-EINVAL); if (!parent_name) return ERR_PTR(-EINVAL); clkmain = kzalloc(sizeof(*clkmain), GFP_KERNEL); if (!clkmain) return ERR_PTR(-ENOMEM); init.name = name; init.ops = &rm9200_main_ops; init.parent_names = &parent_name; init.num_parents = 1; init.flags = 0; clkmain->hw.init = &init; clkmain->regmap = regmap; hw = &clkmain->hw; ret = clk_hw_register(NULL, &clkmain->hw); if (ret) { kfree(clkmain); hw = ERR_PTR(ret); } return hw; } static void __init of_at91rm9200_clk_main_setup(struct device_node *np) { struct clk_hw *hw; const char *parent_name; const char *name = np->name; struct regmap *regmap; parent_name = of_clk_get_parent_name(np, 0); of_property_read_string(np, "clock-output-names", &name); regmap = syscon_node_to_regmap(of_get_parent(np)); if (IS_ERR(regmap)) return; hw = at91_clk_register_rm9200_main(regmap, name, parent_name); if (IS_ERR(hw)) return; of_clk_add_hw_provider(np, of_clk_hw_simple_get, hw); } CLK_OF_DECLARE(at91rm9200_clk_main, "atmel,at91rm9200-clk-main", of_at91rm9200_clk_main_setup); static inline bool clk_sam9x5_main_ready(struct regmap *regmap) { unsigned int status; regmap_read(regmap, AT91_PMC_SR, &status); return status & AT91_PMC_MOSCSELS ? 
1 : 0; } static int clk_sam9x5_main_prepare(struct clk_hw *hw) { struct clk_sam9x5_main *clkmain = to_clk_sam9x5_main(hw); struct regmap *regmap = clkmain->regmap; while (!clk_sam9x5_main_ready(regmap)) cpu_relax(); return clk_main_probe_frequency(regmap); } static int clk_sam9x5_main_is_prepared(struct clk_hw *hw) { struct clk_sam9x5_main *clkmain = to_clk_sam9x5_main(hw); return clk_sam9x5_main_ready(clkmain->regmap); } static unsigned long clk_sam9x5_main_recalc_rate(struct clk_hw *hw, unsigned long parent_rate) { struct clk_sam9x5_main *clkmain = to_clk_sam9x5_main(hw); return clk_main_recalc_rate(clkmain->regmap, parent_rate); } static int clk_sam9x5_main_set_parent(struct clk_hw *hw, u8 index) { struct clk_sam9x5_main *clkmain = to_clk_sam9x5_main(hw); struct regmap *regmap = clkmain->regmap; unsigned int tmp; if (index > 1) return -EINVAL; regmap_read(regmap, AT91_CKGR_MOR, &tmp); tmp &= ~MOR_KEY_MASK; if (index && !(tmp & AT91_PMC_MOSCSEL)) regmap_write(regmap, AT91_CKGR_MOR, tmp | AT91_PMC_MOSCSEL); else if (!index && (tmp & AT91_PMC_MOSCSEL)) regmap_write(regmap, AT91_CKGR_MOR, tmp & ~AT91_PMC_MOSCSEL); while (!clk_sam9x5_main_ready(regmap)) cpu_relax(); return 0; } static u8 clk_sam9x5_main_get_parent(struct clk_hw *hw) { struct clk_sam9x5_main *clkmain = to_clk_sam9x5_main(hw); unsigned int status; regmap_read(clkmain->regmap, AT91_CKGR_MOR, &status); return status & AT91_PMC_MOSCEN ? 1 : 0; } static const struct clk_ops sam9x5_main_ops = { .prepare = clk_sam9x5_main_prepare, .is_prepared = clk_sam9x5_main_is_prepared, .recalc_rate = clk_sam9x5_main_recalc_rate, .set_parent = clk_sam9x5_main_set_parent, .get_parent = clk_sam9x5_main_get_parent, }; static struct clk_hw * __init at91_clk_register_sam9x5_main(struct regmap *regmap, const char *name, const char **parent_names, int num_parents) { struct clk_sam9x5_main *clkmain; struct clk_init_data init; unsigned int status; struct clk_hw *hw; int ret; if (!name) return ERR_PTR(-EINVAL); if (!parent_names || !num_parents) return ERR_PTR(-EINVAL); clkmain = kzalloc(sizeof(*clkmain), GFP_KERNEL); if (!clkmain) return ERR_PTR(-ENOMEM); init.name = name; init.ops = &sam9x5_main_ops; init.parent_names = parent_names; init.num_parents = num_parents; init.flags = CLK_SET_PARENT_GATE; clkmain->hw.init = &init; clkmain->regmap = regmap; regmap_read(clkmain->regmap, AT91_CKGR_MOR, &status); clkmain->parent = status & AT91_PMC_MOSCEN ? 1 : 0; hw = &clkmain->hw; ret = clk_hw_register(NULL, &clkmain->hw); if (ret) { kfree(clkmain); hw = ERR_PTR(ret); } return hw; } static void __init of_at91sam9x5_clk_main_setup(struct device_node *np) { struct clk_hw *hw; const char *parent_names[2]; unsigned int num_parents; const char *name = np->name; struct regmap *regmap; num_parents = of_clk_get_parent_count(np); if (num_parents == 0 || num_parents > 2) return; of_clk_parent_fill(np, parent_names, num_parents); regmap = syscon_node_to_regmap(of_get_parent(np)); if (IS_ERR(regmap)) return; of_property_read_string(np, "clock-output-names", &name); hw = at91_clk_register_sam9x5_main(regmap, name, parent_names, num_parents); if (IS_ERR(hw)) return; of_clk_add_hw_provider(np, of_clk_hw_simple_get, hw); } CLK_OF_DECLARE(at91sam9x5_clk_main, "atmel,at91sam9x5-clk-main", of_at91sam9x5_clk_main_setup);
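The frequency estimate in clk_main_recalc_rate() above reduces to simple arithmetic: the MAINF field counts main-clock cycles over MAINF_DIV (16) periods of the 32768 Hz slow clock, so each count is worth 2048 Hz. A minimal sketch of that calculation (Python is used purely for illustration; the constants are the ones defined at the top of the driver):

SLOW_CLOCK_FREQ = 32768   # Hz, as defined in the driver
MAINF_DIV = 16            # measurement window, in slow-clock periods

def main_clock_estimate(mainf_count):
    # Mirrors clk_main_recalc_rate(): counts * 32768 / 16 == counts * 2048 Hz.
    return (mainf_count * SLOW_CLOCK_FREQ) // MAINF_DIV

# Example: a MAINF reading of 5859 corresponds to roughly a 12 MHz crystal.
print(main_clock_estimate(5859))  # 11999232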
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html lang="en" ng-app="jpm"> <head> <meta charset="utf-8" /> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" /> <meta name="viewport" content="width=device-width, initial-scale=1" /> <link href="/releases/4.2.0/css/style.css" rel="stylesheet" /> <script src="https://code.jquery.com/jquery-3.4.1.min.js"></script> <script src="/js/releases.js"></script> <!-- Begin Jekyll SEO tag v2.5.0 --> <title>-runjdb PORT</title> <meta name="generator" content="Jekyll v3.7.4" /> <meta property="og:title" content="-runjdb PORT" /> <meta property="og:locale" content="en_US" /> <meta name="description" content="public int launch() throws Exception { prepare(); java = new Command(); // // Handle the environment // Map&lt;String,String&gt; env = getRunEnv(); for ( Map.Entry&lt;String,String&gt; e:env.entrySet()) { java.var(e.getKey(), e.getValue()); } java.add(project.getProperty(&quot;java&quot;, &quot;java&quot;)); String javaagent = project.getProperty(Constants.JAVAAGENT); if (Processor.isTrue(javaagent)) { for (String agent : agents) { java.add(&quot;-javaagent:&quot; + agent); } }" /> <meta property="og:description" content="public int launch() throws Exception { prepare(); java = new Command(); // // Handle the environment // Map&lt;String,String&gt; env = getRunEnv(); for ( Map.Entry&lt;String,String&gt; e:env.entrySet()) { java.var(e.getKey(), e.getValue()); } java.add(project.getProperty(&quot;java&quot;, &quot;java&quot;)); String javaagent = project.getProperty(Constants.JAVAAGENT); if (Processor.isTrue(javaagent)) { for (String agent : agents) { java.add(&quot;-javaagent:&quot; + agent); } }" /> <meta property="og:type" content="article" /> <meta property="article:published_time" content="2019-10-12T19:48:23-04:00" /> <script type="application/ld+json"> {"headline":"-runjdb PORT","dateModified":"2019-10-12T19:48:23-04:00","datePublished":"2019-10-12T19:48:23-04:00","@type":"BlogPosting","mainEntityOfPage":{"@type":"WebPage","@id":"/releases/4.2.0/instructions/runjdb.html"},"url":"/releases/4.2.0/instructions/runjdb.html","description":"public int launch() throws Exception { prepare(); java = new Command(); // // Handle the environment // Map&lt;String,String&gt; env = getRunEnv(); for ( Map.Entry&lt;String,String&gt; e:env.entrySet()) { java.var(e.getKey(), e.getValue()); } java.add(project.getProperty(&quot;java&quot;, &quot;java&quot;)); String javaagent = project.getProperty(Constants.JAVAAGENT); if (Processor.isTrue(javaagent)) { for (String agent : agents) { java.add(&quot;-javaagent:&quot; + agent); } }","@context":"http://schema.org"}</script> <!-- End Jekyll SEO tag --> </head> <body> <ul class="container12 menu-bar"> <li span=11><a class=menu-link href="/releases/4.2.0/"><img class=menu-logo src='/releases/4.2.0/img/bnd-80x40-white.png'></a> <a href="/releases/4.2.0/chapters/110-introduction.html">Intro </a><a href="/releases/4.2.0/chapters/800-headers.html">Headers </a><a href="/releases/4.2.0/chapters/825-instructions-ref.html">Instructions </a><a href="/releases/4.2.0/chapters/855-macros-ref.html">Macros </a><a href="/releases/4.2.0/chapters/400-commands.html">Commands </a><div class="releases"><button class="dropbtn">4.2.0</button><div class="dropdown-content"></div></div> <li class=menu-link span=1> <a href="https://github.com/bndtools/bnd" target="_"><img style="position:absolute;top:0;right:0;margin:0;padding:0;z-index:100" 
src="https://camo.githubusercontent.com/38ef81f8aca64bb9a64448d0d70f1308ef5341ab/68747470733a2f2f73332e616d617a6f6e6177732e636f6d2f6769746875622f726962626f6e732f666f726b6d655f72696768745f6461726b626c75655f3132313632312e706e67" alt="Fork me on GitHub" data-canonical-src="https://s3.amazonaws.com/github/ribbons/forkme_right_darkblue_121621.png"></a> </ul> <ul class=container12> <li span=3> <div> <ul class="side-nav"> <li><a href="/releases/4.2.0/chapters/110-introduction.html">Introduction</a> <li><a href="/releases/4.2.0/chapters/120-install.html">How to install bnd</a> <li><a href="/releases/4.2.0/chapters/123-tour-workspace.html">Guided Tour</a> <li><a href="/releases/4.2.0/chapters/125-tour-features.html">Guided Tour Workspace & Projects</a> <li><a href="/releases/4.2.0/chapters/130-concepts.html">Concepts</a> <li><a href="/releases/4.2.0/chapters/140-best-practices.html">Best practices</a> <li><a href="/releases/4.2.0/chapters/150-build.html">Build</a> <li><a href="/releases/4.2.0/chapters/160-jars.html">Generating JARs</a> <li><a href="/releases/4.2.0/chapters/170-versioning.html">Versioning</a> <li><a href="/releases/4.2.0/chapters/180-baselining.html">Baselining</a> <li><a href="/releases/4.2.0/chapters/200-components.html">Service Components</a> <li><a href="/releases/4.2.0/chapters/210-metatype.html">Metatype</a> <li><a href="/releases/4.2.0/chapters/220-contracts.html">Contracts</a> <li><a href="/releases/4.2.0/chapters/230-manifest-annotations.html">Manifest Annotations</a> <li><a href="/releases/4.2.0/chapters/250-resolving.html">Resolving Dependencies</a> <li><a href="/releases/4.2.0/chapters/300-launching.html">Launching</a> <li><a href="/releases/4.2.0/chapters/305-Junit-Testing-OSGi.html">Plain JUnit Testing with OSGi (PRELIMENARY)</a> <li><a href="/releases/4.2.0/chapters/310-testing.html">Testing</a> <li><a href="/releases/4.2.0/chapters/320-packaging.html">Packaging Applications</a> <li><a href="/releases/4.2.0/chapters/390-wrapping.html">Wrapping Libraries to OSGi Bundles</a> <li><a href="/releases/4.2.0/chapters/400-commands.html">Commands</a> <li><a href="/releases/4.2.0/chapters/600-developer.html">For Developers</a> <li><a href="/releases/4.2.0/chapters/700-tools.html">Tools bound to bnd</a> <li><a href="/releases/4.2.0/chapters/800-headers.html">Headers</a> <li><a href="/releases/4.2.0/chapters/820-instructions.html">Instruction Reference</a> <li><a href="/releases/4.2.0/chapters/825-instructions-ref.html">Instruction Index</a> <li><a href="/releases/4.2.0/chapters/850-macros.html">Macro Reference</a> <li><a href="/releases/4.2.0/chapters/855-macros-ref.html">Macro Index</a> <li><a href="/releases/4.2.0/chapters/870-plugins.html">Plugins</a> <li><a href="/releases/4.2.0/chapters/880-settings.html">Settings</a> <li><a href="/releases/4.2.0/chapters/900-errors.html">Errors</a> <li><a href="/releases/4.2.0/chapters/910-warnings.html">Warnings</a> <li><a href="/releases/4.2.0/chapters/920-faq.html">Frequently Asked Questions</a> </ul> </div> <li span=9> <div class=notes-margin> <h1> -runjdb PORT</h1> <div class="highlighter-rouge"><div class="highlight"><pre class="highlight"><code>public int launch() throws Exception { prepare(); java = new Command(); // // Handle the environment // Map&lt;String,String&gt; env = getRunEnv(); for ( Map.Entry&lt;String,String&gt; e:env.entrySet()) { java.var(e.getKey(), e.getValue()); } java.add(project.getProperty("java", "java")); String javaagent = project.getProperty(Constants.JAVAAGENT); if (Processor.isTrue(javaagent)) { for 
(String agent : agents) { java.add("-javaagent:" + agent); } } String jdb = getRunJdb(); if (jdb != null) { int port = 1044; try { port = Integer.parseInt(project.getProperty(Constants.RUNJDB)); } catch (Exception e) { // ok, value can also be ok, or on, or true } String suspend = port &gt; 0 ? "y" : "n"; java.add("-Xrunjdwp:server=y,transport=dt_socket,address=" + Math.abs(port) + ",suspend=" + suspend); } java.add("-cp"); java.add(Processor.join(getClasspath(), File.pathSeparator)); java.addAll(getRunVM()); java.add(getMainTypeName()); java.addAll(getRunProgramArgs()); if (timeout != 0) java.setTimeout(timeout + 1000, TimeUnit.MILLISECONDS); File cwd = getCwd(); if (cwd != null) java.setCwd(cwd); project.trace("cmd line %s", java); try { int result = java.execute(System.in, System.err, System.err); if (result == Integer.MIN_VALUE) return TIMEDOUT; reportResult(result); return result; } finally { cleanup(); listeners.clear(); } } </code></pre></div></div> </div> </ul> <nav class=next-prev> <a href='/releases/4.2.0/instructions/runfw.html'></a> <a href='/releases/4.2.0/instructions/runkeep.html'></a> </nav> <footer class="container12" style="border-top: 1px solid black;padding:10px 0"> <ul span=12 row> <li span=12> <ul> <li><a href="/releases/4.2.0/">GitHub</a> </ul> </ul> </footer> </body> </html>
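Concretely, with the default port of 1044 shown in the launcher above (a positive value, so suspend stays enabled), the extra argument added to the java command line comes out as:

-Xrunjdwp:server=y,transport=dt_socket,address=1044,suspend=y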
{ "pile_set_name": "Github" }
<%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %>
<%@ taglib uri="http://www.springframework.org/security/tags" prefix="sec" %>
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Home Page</title>
</head>
<body>
    <h3>Home Page</h3>
    <p>
        Hello <b><c:out value="${pageContext.request.remoteUser}"/></b><br>
        Roles: <b><sec:authentication property="principal.authorities" /></b>
    </p>
    <form action="logout" method="post">
        <input type="submit" value="Logout" />
        <input type="hidden" name="${_csrf.parameterName}" value="${_csrf.token}"/>
    </form>
</body>
</html>
{ "pile_set_name": "Github" }
import { Badge } from 'terra-icon/package.json?dev-site-package';
import ChangeLog from 'terra-icon/CHANGELOG.md';

<Badge />

<ChangeLog />
{ "pile_set_name": "Github" }
package tlv import ( "bytes" "errors" "io" "io/ioutil" "math" ) // MaxRecordSize is the maximum size of a particular record that will be parsed // by a stream decoder. This value is currently chosen to the be equal to the // maximum message size permitted by BOLT 1, as no record should be bigger than // an entire message. const MaxRecordSize = 65535 // 65KB // ErrStreamNotCanonical signals that a decoded stream does not contain records // sorting by monotonically-increasing type. var ErrStreamNotCanonical = errors.New("tlv stream is not canonical") // ErrRecordTooLarge signals that a decoded record has a length that is too // long to parse. var ErrRecordTooLarge = errors.New("record is too large") // Stream defines a TLV stream that can be used for encoding or decoding a set // of TLV Records. type Stream struct { records []Record buf [8]byte } // NewStream creates a new TLV Stream given an encoding codec, a decoding codec, // and a set of known records. func NewStream(records ...Record) (*Stream, error) { // Assert that the ordering of the Records is canonical and appear in // ascending order of type. var ( min Type overflow bool ) for _, record := range records { if overflow || record.typ < min { return nil, ErrStreamNotCanonical } if record.encoder == nil { record.encoder = ENOP } if record.decoder == nil { record.decoder = DNOP } if record.typ == math.MaxUint64 { overflow = true } min = record.typ + 1 } return &Stream{ records: records, }, nil } // MustNewStream creates a new TLV Stream given an encoding codec, a decoding // codec, and a set of known records. If an error is encountered in creating the // stream, this method will panic instead of returning the error. func MustNewStream(records ...Record) *Stream { stream, err := NewStream(records...) if err != nil { panic(err.Error()) } return stream } // Encode writes a Stream to the passed io.Writer. Each of the Records known to // the Stream is written in ascending order of their type so as to be canonical. // // The stream is constructed by concatenating the individual, serialized Records // where each record has the following format: // [varint: type] // [varint: length] // [length: value] // // An error is returned if the io.Writer fails to accept bytes from the // encoding, and nothing else. The ordering of the Records is asserted upon the // creation of a Stream, and thus the output will be by definition canonical. func (s *Stream) Encode(w io.Writer) error { // Iterate through all known records, if any, serializing each record's // type, length and value. for i := range s.records { rec := &s.records[i] // Write the record's type as a varint. err := WriteVarInt(w, uint64(rec.typ), &s.buf) if err != nil { return err } // Write the record's length as a varint. err = WriteVarInt(w, rec.Size(), &s.buf) if err != nil { return err } // Encode the current record's value using the stream's codec. err = rec.encoder(w, rec.value, &s.buf) if err != nil { return err } } return nil } // Decode deserializes TLV Stream from the passed io.Reader. The Stream will // inspect each record that is parsed and check to see if it has a corresponding // Record to facilitate deserialization of that field. If the record is unknown, // the Stream will discard the record's bytes and proceed to the subsequent // record. 
// // Each record has the following format: // [varint: type] // [varint: length] // [length: value] // // A series of (possibly zero) records are concatenated into a stream, this // example contains two records: // // (t: 0x01, l: 0x04, v: 0xff, 0xff, 0xff, 0xff) // (t: 0x02, l: 0x01, v: 0x01) // // This method asserts that the byte stream is canonical, namely that each // record is unique and that all records are sorted in ascending order. An // ErrNotCanonicalStream error is returned if the encoded TLV stream is not. // // We permit an io.EOF error only when reading the type byte which signals that // the last record was read cleanly and we should stop parsing. All other io.EOF // or io.ErrUnexpectedEOF errors are returned. func (s *Stream) Decode(r io.Reader) error { _, err := s.decode(r, nil) return err } // DecodeWithParsedTypes is identical to Decode, but if successful, returns a // TypeMap containing the types of all records that were decoded or ignored from // the stream. func (s *Stream) DecodeWithParsedTypes(r io.Reader) (TypeMap, error) { return s.decode(r, make(TypeMap)) } // decode is a helper function that performs the basis of stream decoding. If // the caller needs the set of parsed types, it must provide an initialized // parsedTypes, otherwise the returned TypeMap will be nil. func (s *Stream) decode(r io.Reader, parsedTypes TypeMap) (TypeMap, error) { var ( typ Type min Type recordIdx int overflow bool ) // Iterate through all possible type identifiers. As types are read from // the io.Reader, min will skip forward to the last read type. for { // Read the next varint type. t, err := ReadVarInt(r, &s.buf) switch { // We'll silence an EOF when zero bytes remain, meaning the // stream was cleanly encoded. case err == io.EOF: return parsedTypes, nil // Other unexpected errors. case err != nil: return nil, err } typ = Type(t) // Assert that this type is greater than any previously read. // If we've already overflowed and we parsed another type, the // stream is not canonical. This check prevents us from accepts // encodings that have duplicate records or from accepting an // unsorted series. if overflow || typ < min { return nil, ErrStreamNotCanonical } // Read the varint length. length, err := ReadVarInt(r, &s.buf) switch { // We'll convert any EOFs to ErrUnexpectedEOF, since this // results in an invalid record. case err == io.EOF: return nil, io.ErrUnexpectedEOF // Other unexpected errors. case err != nil: return nil, err } // Place a soft limit on the size of a sane record, which // prevents malicious encoders from causing us to allocate an // unbounded amount of memory when decoding variable-sized // fields. if length > MaxRecordSize { return nil, ErrRecordTooLarge } // Search the records known to the stream for this type. We'll // begin the search and recordIdx and walk forward until we find // it or the next record's type is larger. rec, newIdx, ok := s.getRecord(typ, recordIdx) switch { // We know of this record type, proceed to decode the value. // This method asserts that length bytes are read in the // process, and returns an error if the number of bytes is not // exactly length. case ok: err := rec.decoder(r, rec.value, &s.buf, length) switch { // We'll convert any EOFs to ErrUnexpectedEOF, since this // results in an invalid record. case err == io.EOF: return nil, io.ErrUnexpectedEOF // Other unexpected errors. case err != nil: return nil, err } // Record the successfully decoded type if the caller // provided an initialized TypeMap. 
if parsedTypes != nil { parsedTypes[typ] = nil } // Otherwise, the record type is unknown and is odd, discard the // number of bytes specified by length. default: // If the caller provided an initialized TypeMap, record // the encoded bytes. var b *bytes.Buffer writer := ioutil.Discard if parsedTypes != nil { b = bytes.NewBuffer(make([]byte, 0, length)) writer = b } _, err := io.CopyN(writer, r, int64(length)) switch { // We'll convert any EOFs to ErrUnexpectedEOF, since this // results in an invalid record. case err == io.EOF: return nil, io.ErrUnexpectedEOF // Other unexpected errors. case err != nil: return nil, err } if parsedTypes != nil { parsedTypes[typ] = b.Bytes() } } // Update our record index so that we can begin our next search // from where we left off. recordIdx = newIdx // If we've parsed the largest possible type, the next loop will // overflow back to zero. However, we need to attempt parsing // the next type to ensure that the stream is empty. if typ == math.MaxUint64 { overflow = true } // Finally, set our lower bound on the next accepted type. min = typ + 1 } } // getRecord searches for a record matching typ known to the stream. The boolean // return value indicates whether the record is known to the stream. The integer // return value carries the index from where getRecord should be invoked on the // subsequent call. The first call to getRecord should always use an idx of 0. func (s *Stream) getRecord(typ Type, idx int) (Record, int, bool) { for idx < len(s.records) { record := s.records[idx] switch { // Found target record, return it to the caller. The next index // returned points to the immediately following record. case record.typ == typ: return record, idx + 1, true // This record's type is lower than the target. Advance our // index and continue to the next record which will have a // strictly higher type. case record.typ < typ: idx++ continue // This record's type is larger than the target, hence we have // no record matching the current type. Return the current index // so that we can start our search from here when processing the // next tlv record. default: return Record{}, idx, false } } // All known records are exhausted. return Record{}, idx, false }
{ "pile_set_name": "Github" }
package com.easy.aliNacosProvider;

import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;

@RestController
@Slf4j
public class HelloController {

    @GetMapping(value = "/hello/{str}", produces = "application/json")
    public String hello(@PathVariable String str) {
        log.info("-----------收到消费者请求-----------");       // "received a request from the consumer"
        log.info("收到消费者传递的参数:" + str);                 // "parameter passed by the consumer:"
        String result = "我是服务提供者,见到你很高兴==>" + str;    // "I am the service provider, nice to meet you==>"
        log.info("提供者返回结果:" + result);                    // "provider returns result:"
        return result;
    }
}
{ "pile_set_name": "Github" }
/****************************************************************************** * * Copyright (C) 2011 - 2014 Xilinx, Inc. All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * Use of the Software is limited solely to applications: * (a) running on a Xilinx device, or * (b) that interact with a Xilinx device through a bus or interconnect. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * XILINX BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF * OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * * Except as contained in this notice, the name of the Xilinx shall not be used * in advertising or otherwise to promote the sale, use or other dealings in * this Software without prior written authorization from Xilinx. * ******************************************************************************/ /*****************************************************************************/ /** * * @file xadcps_selftest.c * @addtogroup xadcps_v2_2 * @{ * * This file contains a diagnostic self test function for the XAdcPs driver. * The self test function does a simple read/write test of the Alarm Threshold * Register. * * See xadcps.h for more information. * * @note None. * * <pre> * * MODIFICATION HISTORY: * * Ver Who Date Changes * ----- ----- -------- ----------------------------------------------------- * 1.00a ssb 12/22/11 First release based on the XPS/AXI xadc driver * * </pre> * *****************************************************************************/ /***************************** Include Files ********************************/ #include "xadcps.h" /************************** Constant Definitions ****************************/ /* * The following constant defines the test value to be written * to the Alarm Threshold Register */ #define XADCPS_ATR_TEST_VALUE 0x55 /**************************** Type Definitions ******************************/ /***************** Macros (Inline Functions) Definitions ********************/ /************************** Variable Definitions ****************************/ /************************** Function Prototypes *****************************/ /*****************************************************************************/ /** * * Run a self-test on the driver/device. The test * - Resets the device, * - Writes a value into the Alarm Threshold register and reads it back * for comparison. * - Resets the device again. * * * @param InstancePtr is a pointer to the XAdcPs instance. * * @return * - XST_SUCCESS if the value read from the Alarm Threshold * register is the same as the value written. * - XST_FAILURE Otherwise * * @note This is a destructive test in that resets of the device are * performed. 
Refer to the device specification for the * device status after the reset operation. * ******************************************************************************/ int XAdcPs_SelfTest(XAdcPs *InstancePtr) { int Status; u32 RegValue; /* * Assert the argument */ Xil_AssertNonvoid(InstancePtr != NULL); Xil_AssertNonvoid(InstancePtr->IsReady == XIL_COMPONENT_IS_READY); /* * Reset the device to get it back to its default state */ XAdcPs_Reset(InstancePtr); /* * Write a value into the Alarm Threshold registers, read it back, and * do the comparison */ XAdcPs_SetAlarmThreshold(InstancePtr, XADCPS_ATR_VCCINT_UPPER, XADCPS_ATR_TEST_VALUE); RegValue = XAdcPs_GetAlarmThreshold(InstancePtr, XADCPS_ATR_VCCINT_UPPER); if (RegValue == XADCPS_ATR_TEST_VALUE) { Status = XST_SUCCESS; } else { Status = XST_FAILURE; } /* * Reset the device again to its default state. */ XAdcPs_Reset(InstancePtr); /* * Return the test result. */ return Status; } /** @} */
{ "pile_set_name": "Github" }
# frozen_string_literal: true

module Solargraph::LanguageServer::Message::Workspace
  class DidChangeWatchedFiles < Solargraph::LanguageServer::Message::Base
    CREATED = 1
    CHANGED = 2
    DELETED = 3

    include Solargraph::LanguageServer::UriHelpers

    def process
      need_catalog = false
      # @param change [Hash]
      params['changes'].each do |change|
        if change['type'] == CREATED
          host.create change['uri']
          need_catalog = true
        elsif change['type'] == CHANGED
          next if host.open?(change['uri'])
          host.create change['uri']
          need_catalog = true
        elsif change['type'] == DELETED
          host.delete change['uri']
          need_catalog = true
        else
          set_error Solargraph::LanguageServer::ErrorCodes::INVALID_PARAMS, "Unknown change type ##{change['type']} for #{uri_to_file(change['uri'])}"
        end
      end
      # Force host to catalog libraries after file changes (see castwide/solargraph#139)
      host.catalog if need_catalog
    end
  end
end
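For context, the params consumed by process above follow the LSP workspace/didChangeWatchedFiles shape: a list of changes, each carrying a uri and a numeric type that matches the CREATED/CHANGED/DELETED constants. A hedged sketch of such a payload (the file paths are made up for illustration), written as a Python dict to keep the example language consistent with the other sketches:

params = {
    "changes": [
        {"uri": "file:///workspace/lib/example.rb", "type": 1},  # CREATED
        {"uri": "file:///workspace/lib/other.rb",   "type": 3},  # DELETED
    ]
}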
{ "pile_set_name": "Github" }
# -*- mode: ruby -*- # vi: set ft=ruby : # All Vagrant configuration is done below. The "2" in Vagrant.configure # configures the configuration version (we support older styles for # backwards compatibility). Please don't change it unless you know what # you're doing. Vagrant.configure(2) do |config| # The most common configuration options are documented and commented below. # For a complete reference, please see the online documentation at # https://docs.vagrantup.com. # Every Vagrant development environment requires a box. You can search for # boxes at https://atlas.hashicorp.com/search. config.vm.box = "ubuntu/trusty64" # Disable automatic box update checking. If you disable this, then # boxes will only be checked for updates when the user runs # `vagrant box outdated`. This is not recommended. # config.vm.box_check_update = false # Create a forwarded port mapping which allows access to a specific port # within the machine from a port on the host machine. In the example below, # accessing "localhost:8080" will access port 80 on the guest machine. config.vm.network "forwarded_port", guest: 80, host: 8080 config.vm.network "forwarded_port", guest: 1234, host: 1234 # Create a private network, which allows host-only access to the machine # using a specific IP. # config.vm.network "private_network", ip: "10.0.0.10" # Create a public network, which generally matched to bridged network. # Bridged networks make the machine appear as another physical device on # your network. # config.vm.network "public_network" # Share an additional folder to the guest VM. The first argument is # the path on the host to the actual folder. The second argument is # the path on the guest to mount the folder. And the optional third # argument is a set of non-required options. # config.vm.synced_folder "./wordpress", "/home/vagrant/wordpress" # Provider-specific configuration so you can fine-tune various # backing providers for Vagrant. These expose provider-specific options. # Example for VirtualBox: # config.vm.provider "virtualbox" do |vb| # Display the VirtualBox GUI when booting the machine # vb.gui = true # Customize the amount of memory on the VM: vb.memory = "1024" end # # View the documentation for the provider you are using for more # information on available options. # Define a Vagrant Push strategy for pushing to Atlas. Other push strategies # such as FTP and Heroku are also available. See the documentation at # https://docs.vagrantup.com/v2/push/atlas.html for more information. # config.push.define "atlas" do |push| # push.app = "YOUR_ATLAS_USERNAME/YOUR_APPLICATION_NAME" # end # Enable provisioning with a shell script. Additional provisioners such as # Puppet, Chef, Ansible, Salt, and Docker are also available. Please see the # documentation for more information about their specific syntax and use. config.vm.provision "shell", inline: <<-SHELL sudo apt-get update \ && sudo apt-get -y install wget \ && wget -qO- https://get.docker.com/ | sh \ && gpasswd -a vagrant docker \ && service docker restart SHELL end
{ "pile_set_name": "Github" }
<?php namespace Elgg\Cache; /** * Least Recently Used Cache * * A fixed sized cache that removes the element used last when it reaches its * size limit. * * Based on https://github.com/cash/LRUCache * * @internal */ class LRUCache implements \ArrayAccess { /** @var int */ protected $maximumSize; /** * The front of the array contains the LRU element * * @var array */ protected $data = []; /** * Create a LRU Cache * * @param int $size The size of the cache * @throws \InvalidArgumentException */ public function __construct($size) { if (!is_int($size) || $size <= 0) { throw new \InvalidArgumentException(); } $this->maximumSize = $size; } /** * Get the value cached with this key * * @param int|string $key The key. Strings that are ints are cast to ints. * @param mixed $default The value to be returned if key not found. (Optional) * @return mixed */ public function get($key, $default = null) { if ($this->containsKey($key)) { $this->recordAccess($key); return $this->data[$key]; } return $default; } /** * Add something to the cache * * @param int|string $key The key. Strings that are ints are cast to ints. * @param mixed $value The value to cache * @return void */ public function set($key, $value) { if ($this->containsKey($key)) { $this->data[$key] = $value; $this->recordAccess($key); } else { $this->data[$key] = $value; if ($this->size() > $this->maximumSize) { // remove least recently used element (front of array) reset($this->data); unset($this->data[key($this->data)]); } } } /** * Get the number of elements in the cache * * @return int */ public function size() { return count($this->data); } /** * Does the cache contain an element with this key * * @param int|string $key The key * @return boolean */ public function containsKey($key) { return array_key_exists($key, $this->data); } /** * Remove the element with this key. * * @param int|string $key The key * @return mixed Value or null if not set */ public function remove($key) { if ($this->containsKey($key)) { $value = $this->data[$key]; unset($this->data[$key]); return $value; } return null; } /** * Clear the cache * * @return void */ public function clear() { $this->data = []; } /** * Moves the element from current position to end of array * * @param int|string $key The key * @return void */ protected function recordAccess($key) { $value = $this->data[$key]; unset($this->data[$key]); $this->data[$key] = $value; } /** * Assigns a value for the specified key * * @see \ArrayAccess::offsetSet() * * @param int|string $key The key to assign the value to. * @param mixed $value The value to set. * @return void */ public function offsetSet($key, $value) { $this->set($key, $value); } /** * Get the value for specified key * * @see \ArrayAccess::offsetGet() * * @param int|string $key The key to retrieve. * @return mixed */ public function offsetGet($key) { return $this->get($key); } /** * Unsets a key. * * @see \ArrayAccess::offsetUnset() * * @param int|string $key The key to unset. * @return void */ public function offsetUnset($key) { $this->remove($key); } /** * Does key exist? * * @see \ArrayAccess::offsetExists() * * @param int|string $key A key to check for. * @return boolean */ public function offsetExists($key) { return $this->containsKey($key); } }
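The eviction strategy implemented above — move a key to the back of the array on every access, drop the front element once the size limit is exceeded — can be sketched very compactly. A rough Python equivalent of the same idea (not a drop-in replacement for the PHP class), using an ordered dict:

from collections import OrderedDict

class LRUCache:
    def __init__(self, size):
        if size <= 0:
            raise ValueError("size must be positive")
        self.size = size
        self.data = OrderedDict()

    def get(self, key, default=None):
        if key in self.data:
            self.data.move_to_end(key)     # record access: most recently used moves to the back
            return self.data[key]
        return default

    def set(self, key, value):
        self.data[key] = value
        self.data.move_to_end(key)
        if len(self.data) > self.size:
            self.data.popitem(last=False)  # evict least recently used element (front)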
{ "pile_set_name": "Github" }
; RUN: llc < %s -disable-fp-elim target datalayout = "e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-f32:32:32-f64:32:64-v64:64:64-v128:128:128-a0:0:64-f80:128:128-n8:16:32-S128" target triple = "i386-apple-macosx10.7" ; This test case has a landing pad with two predecessors, and a variable that ; is undef on the first edge while carrying the first function return value on ; the second edge. ; ; Live range splitting tries to isolate the block containing the first function ; call, and it is important that the last split point is after the function call ; so the return value can spill. ; ; <rdar://problem/10664933> @Exception = external unnamed_addr constant { i8*, i8* } declare void @llvm.memset.p0i8.i32(i8* nocapture, i8, i32, i32, i1) nounwind define void @f(i32* nocapture %arg, i32* nocapture %arg1, i32* nocapture %arg2, i32* nocapture %arg3, i32 %arg4, i32 %arg5) optsize ssp { bb: br i1 undef, label %bb6, label %bb7 bb6: ; preds = %bb %tmp = select i1 false, i32 0, i32 undef br label %bb7 bb7: ; preds = %bb6, %bb %tmp8 = phi i32 [ %tmp, %bb6 ], [ 0, %bb ] %tmp9 = shl i32 %tmp8, 2 %tmp10 = invoke noalias i8* @_Znam(i32 undef) optsize to label %bb11 unwind label %bb20 bb11: ; preds = %bb7 %tmp12 = ptrtoint i8* %tmp10 to i32 %tmp13 = bitcast i8* %tmp10 to i32* %tmp14 = shl i32 %tmp8, 2 %tmp15 = getelementptr i32* %tmp13, i32 undef %tmp16 = getelementptr i32* %tmp13, i32 undef %tmp17 = zext i32 %tmp9 to i64 %tmp18 = add i64 %tmp17, -1 %tmp19 = icmp ugt i64 %tmp18, 4294967295 br i1 %tmp19, label %bb29, label %bb31 bb20: ; preds = %bb43, %bb41, %bb29, %bb7 %tmp21 = phi i32 [ undef, %bb7 ], [ %tmp12, %bb43 ], [ %tmp12, %bb29 ], [ %tmp12, %bb41 ] %tmp22 = landingpad { i8*, i32 } personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) catch i8* bitcast ({ i8*, i8* }* @Exception to i8*) br i1 undef, label %bb23, label %bb69 bb23: ; preds = %bb38, %bb20 %tmp24 = phi i32 [ %tmp12, %bb38 ], [ %tmp21, %bb20 ] %tmp25 = icmp eq i32 %tmp24, 0 br i1 %tmp25, label %bb28, label %bb26 bb26: ; preds = %bb23 %tmp27 = inttoptr i32 %tmp24 to i8* br label %bb28 bb28: ; preds = %bb26, %bb23 ret void bb29: ; preds = %bb11 invoke void @OnOverFlow() optsize to label %bb30 unwind label %bb20 bb30: ; preds = %bb29 unreachable bb31: ; preds = %bb11 %tmp32 = bitcast i32* %tmp15 to i8* %tmp33 = zext i32 %tmp8 to i64 %tmp34 = add i64 %tmp33, -1 %tmp35 = icmp ugt i64 %tmp34, 4294967295 %tmp36 = icmp sgt i32 %tmp8, 0 %tmp37 = add i32 %tmp9, -4 br label %bb38 bb38: ; preds = %bb67, %bb31 %tmp39 = phi i32 [ %tmp68, %bb67 ], [ undef, %bb31 ] %tmp40 = icmp sgt i32 %tmp39, undef br i1 %tmp40, label %bb41, label %bb23 bb41: ; preds = %bb38 invoke void @Pjii(i32* %tmp16, i32 0, i32 %tmp8) optsize to label %bb42 unwind label %bb20 bb42: ; preds = %bb41 tail call void @llvm.memset.p0i8.i32(i8* %tmp32, i8 0, i32 %tmp9, i32 1, i1 false) nounwind br i1 %tmp35, label %bb43, label %bb45 bb43: ; preds = %bb42 invoke void @OnOverFlow() optsize to label %bb44 unwind label %bb20 bb44: ; preds = %bb43 unreachable bb45: ; preds = %bb57, %bb42 %tmp46 = phi i32 [ %tmp58, %bb57 ], [ 255, %bb42 ] %tmp47 = icmp slt i32 undef, 0 br i1 %tmp47, label %bb48, label %bb59 bb48: ; preds = %bb45 tail call void @llvm.memset.p0i8.i32(i8* %tmp32, i8 0, i32 %tmp9, i32 1, i1 false) nounwind br i1 %tmp36, label %bb49, label %bb57 bb49: ; preds = %bb49, %bb48 %tmp50 = phi i32 [ %tmp55, %bb49 ], [ 0, %bb48 ] %tmp51 = add i32 %tmp50, undef %tmp52 = add i32 %tmp50, undef %tmp53 = getelementptr i32* %tmp13, i32 %tmp52 %tmp54 = load i32* %tmp53, 
align 4, !tbaa !0 %tmp55 = add i32 %tmp50, 1 %tmp56 = icmp eq i32 %tmp55, %tmp8 br i1 %tmp56, label %bb57, label %bb49 bb57: ; preds = %bb49, %bb48 %tmp58 = add i32 %tmp46, -1 br label %bb45 bb59: ; preds = %bb45 %tmp60 = ashr i32 %tmp46, 31 tail call void @llvm.memset.p0i8.i32(i8* null, i8 0, i32 %tmp37, i32 1, i1 false) nounwind br i1 %tmp36, label %bb61, label %bb67 bb61: ; preds = %bb61, %bb59 %tmp62 = phi i32 [ %tmp65, %bb61 ], [ 0, %bb59 ] %tmp63 = add i32 %tmp62, %tmp14 %tmp64 = getelementptr i32* %tmp13, i32 %tmp63 store i32 0, i32* %tmp64, align 4, !tbaa !0 %tmp65 = add i32 %tmp62, 1 %tmp66 = icmp eq i32 %tmp65, %tmp8 br i1 %tmp66, label %bb67, label %bb61 bb67: ; preds = %bb61, %bb59 %tmp68 = add i32 %tmp39, -1 br label %bb38 bb69: ; preds = %bb20 resume { i8*, i32 } %tmp22 } declare i32 @__gxx_personality_v0(...) declare noalias i8* @_Znam(i32) optsize declare void @Pjii(i32*, i32, i32) optsize declare i32 @llvm.eh.typeid.for(i8*) nounwind readnone declare void @OnOverFlow() noreturn optsize ssp align 2 !0 = metadata !{metadata !"int", metadata !1} !1 = metadata !{metadata !"omnipotent char", metadata !2} !2 = metadata !{metadata !"Simple C/C++ TBAA", null}
{ "pile_set_name": "Github" }
package expression import ( "fmt" "strings" "github.com/aws/aws-sdk-go/service/dynamodb" "github.com/aws/aws-sdk-go/service/dynamodb/dynamodbattribute" ) // ValueBuilder represents an item attribute value operand and implements the // OperandBuilder interface. Methods and functions in the package take // ValueBuilder as an argument and establishes relationships between operands. // ValueBuilder should only be initialized using the function Value(). // // Example: // // // Create a ValueBuilder representing the string "aValue" // valueBuilder := expression.Value("aValue") type ValueBuilder struct { value interface{} } // NameBuilder represents a name of a top level item attribute or a nested // attribute. Since NameBuilder represents a DynamoDB Operand, it implements the // OperandBuilder interface. Methods and functions in the package take // NameBuilder as an argument and establishes relationships between operands. // NameBuilder should only be initialized using the function Name(). // // Example: // // // Create a NameBuilder representing the item attribute "aName" // nameBuilder := expression.Name("aName") type NameBuilder struct { name string } // SizeBuilder represents the output of the function size ("someName"), which // evaluates to the size of the item attribute defined by "someName". Since // SizeBuilder represents an operand, SizeBuilder implements the OperandBuilder // interface. Methods and functions in the package take SizeBuilder as an // argument and establishes relationships between operands. SizeBuilder should // only be initialized using the function Size(). // // Example: // // // Create a SizeBuilder representing the size of the item attribute // // "aName" // sizeBuilder := expression.Name("aName").Size() type SizeBuilder struct { nameBuilder NameBuilder } // KeyBuilder represents either the partition key or the sort key, both of which // are top level attributes to some item in DynamoDB. Since KeyBuilder // represents an operand, KeyBuilder implements the OperandBuilder interface. // Methods and functions in the package take KeyBuilder as an argument and // establishes relationships between operands. However, KeyBuilder should only // be used to describe Key Condition Expressions. KeyBuilder should only be // initialized using the function Key(). // // Example: // // // Create a KeyBuilder representing the item key "aKey" // keyBuilder := expression.Key("aKey") type KeyBuilder struct { key string } // setValueMode specifies the type of SetValueBuilder. The default value is // unsetValue so that an UnsetParameterError when BuildOperand() is called on an // empty SetValueBuilder. type setValueMode int const ( unsetValue setValueMode = iota plusValueMode minusValueMode listAppendValueMode ifNotExistsValueMode ) // SetValueBuilder represents the outcome of operator functions supported by the // DynamoDB Set operation. The operator functions are the following: // Plus() // Represents the "+" operator // Minus() // Represents the "-" operator // ListAppend() // IfNotExists() // For documentation on the above functions, // see: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET // Since SetValueBuilder represents an operand, it implements the OperandBuilder // interface. SetValueBuilder structs are used as arguments to the Set() // function. SetValueBuilders should only initialize a SetValueBuilder using the // functions listed above. 
type SetValueBuilder struct { leftOperand OperandBuilder rightOperand OperandBuilder mode setValueMode } // Operand represents an item attribute name or value in DynamoDB. The // relationship between Operands specified by various builders such as // ConditionBuilders and UpdateBuilders for example is processed internally to // write Condition Expressions and Update Expressions respectively. type Operand struct { exprNode exprNode } // OperandBuilder represents the idea of Operand which are building blocks to // DynamoDB Expressions. Package methods and functions can establish // relationships between operands, representing DynamoDB Expressions. The method // BuildOperand() is called recursively when the Build() method on the type // Builder is called. BuildOperand() should never be called externally. // OperandBuilder and BuildOperand() are exported to allow package functions to // take an interface as an argument. type OperandBuilder interface { BuildOperand() (Operand, error) } // Name creates a NameBuilder. The argument should represent the desired item // attribute. It is possible to reference nested item attributes by using // square brackets for lists and dots for maps. For documentation on specifying // item attributes, // see: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.Attributes.html // // Example: // // // Specify a top-level attribute // name := expression.Name("TopLevel") // // Specify a nested attribute // nested := expression.Name("Record[6].SongList") // // Use Name() to create a condition expression // condition := expression.Name("foo").Equal(expression.Name("bar")) func Name(name string) NameBuilder { return NameBuilder{ name: name, } } // Value creates a ValueBuilder. The argument should represent the desired item // attribute. The value is marshalled using the dynamodbattribute package by the // Build() method for type Builder. // // Example: // // // Use Value() to create a condition expression // condition := expression.Name("foo").Equal(expression.Value(10)) func Value(value interface{}) ValueBuilder { return ValueBuilder{ value: value, } } // Size creates a SizeBuilder representing the size of the item attribute // specified by the argument NameBuilder. Size() is only valid for certain types // of item attributes. For documentation, // see: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.OperatorsAndFunctions.html // SizeBuilder is only a valid operand in Condition Expressions and Filter // Expressions. // // Example: // // // Use Size() to create a condition expression // condition := expression.Name("foo").Size().Equal(expression.Value(10)) // // Expression Equivalent: // // expression.Name("aName").Size() // "size (aName)" func (nb NameBuilder) Size() SizeBuilder { return SizeBuilder{ nameBuilder: nb, } } // Size creates a SizeBuilder representing the size of the item attribute // specified by the argument NameBuilder. Size() is only valid for certain types // of item attributes. For documentation, // see: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.OperatorsAndFunctions.html // SizeBuilder is only a valid operand in Condition Expressions and Filter // Expressions. 
// // Example: // // // Use Size() to create a condition expression // condition := expression.Size(expression.Name("foo")).Equal(expression.Value(10)) // // Expression Equivalent: // // expression.Size(expression.Name("aName")) // "size (aName)" func Size(nameBuilder NameBuilder) SizeBuilder { return nameBuilder.Size() } // Key creates a KeyBuilder. The argument should represent the desired partition // key or sort key value. KeyBuilders should only be used to specify // relationships for Key Condition Expressions. When referring to the partition // key or sort key in any other Expression, use Name(). // // Example: // // // Use Key() to create a key condition expression // keyCondition := expression.Key("foo").Equal(expression.Value("bar")) func Key(key string) KeyBuilder { return KeyBuilder{ key: key, } } // Plus creates a SetValueBuilder to be used in as an argument to Set(). The // arguments can either be NameBuilders or ValueBuilders. Plus() only supports // DynamoDB Number types, so the ValueBuilder must be a Number and the // NameBuilder must specify an item attribute of type Number. // More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.IncrementAndDecrement // // Example: // // // Use Plus() to set the value of the item attribute "someName" to 5 + 10 // update, err := expression.Set(expression.Name("someName"), expression.Plus(expression.Value(5), expression.Value(10))) // // Expression Equivalent: // // expression.Plus(expression.Value(5), expression.Value(10)) // // let :five and :ten be ExpressionAttributeValues for the values 5 and // // 10 respectively. // ":five + :ten" func Plus(leftOperand, rightOperand OperandBuilder) SetValueBuilder { return SetValueBuilder{ leftOperand: leftOperand, rightOperand: rightOperand, mode: plusValueMode, } } // Plus creates a SetValueBuilder to be used in as an argument to Set(). The // arguments can either be NameBuilders or ValueBuilders. Plus() only supports // DynamoDB Number types, so the ValueBuilder must be a Number and the // NameBuilder must specify an item attribute of type Number. // More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.IncrementAndDecrement // // Example: // // // Use Plus() to set the value of the item attribute "someName" to the // // numeric value of item attribute "aName" incremented by 10 // update, err := expression.Set(expression.Name("someName"), expression.Name("aName").Plus(expression.Value(10))) // // Expression Equivalent: // // expression.Name("aName").Plus(expression.Value(10)) // // let :ten be ExpressionAttributeValues representing the value 10 // "aName + :ten" func (nb NameBuilder) Plus(rightOperand OperandBuilder) SetValueBuilder { return Plus(nb, rightOperand) } // Plus creates a SetValueBuilder to be used in as an argument to Set(). The // arguments can either be NameBuilders or ValueBuilders. Plus() only supports // DynamoDB Number types, so the ValueBuilder must be a Number and the // NameBuilder must specify an item attribute of type Number. 
// More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.IncrementAndDecrement // // Example: // // // Use Plus() to set the value of the item attribute "someName" to 5 + 10 // update, err := expression.Set(expression.Name("someName"), expression.Value(5).Plus(expression.Value(10))) // // Expression Equivalent: // // expression.Value(5).Plus(expression.Value(10)) // // let :five and :ten be ExpressionAttributeValues representing the value // // 5 and 10 respectively // ":five + :ten" func (vb ValueBuilder) Plus(rightOperand OperandBuilder) SetValueBuilder { return Plus(vb, rightOperand) } // Minus creates a SetValueBuilder to be used in as an argument to Set(). The // arguments can either be NameBuilders or ValueBuilders. Minus() only supports // DynamoDB Number types, so the ValueBuilder must be a Number and the // NameBuilder must specify an item attribute of type Number. // More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.IncrementAndDecrement // // Example: // // // Use Minus() to set the value of item attribute "someName" to 5 - 10 // update, err := expression.Set(expression.Name("someName"), expression.Minus(expression.Value(5), expression.Value(10))) // // Expression Equivalent: // // expression.Minus(expression.Value(5), expression.Value(10)) // // let :five and :ten be ExpressionAttributeValues for the values 5 and // // 10 respectively. // ":five - :ten" func Minus(leftOperand, rightOperand OperandBuilder) SetValueBuilder { return SetValueBuilder{ leftOperand: leftOperand, rightOperand: rightOperand, mode: minusValueMode, } } // Minus creates a SetValueBuilder to be used in as an argument to Set(). The // arguments can either be NameBuilders or ValueBuilders. Minus() only supports // DynamoDB Number types, so the ValueBuilder must be a Number and the // NameBuilder must specify an item attribute of type Number. // More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.IncrementAndDecrement // // Example: // // // Use Minus() to set the value of item attribute "someName" to the // // numeric value of "aName" decremented by 10 // update, err := expression.Set(expression.Name("someName"), expression.Name("aName").Minus(expression.Value(10))) // // Expression Equivalent: // // expression.Name("aName").Minus(expression.Value(10))) // // let :ten be ExpressionAttributeValues represent the value 10 // "aName - :ten" func (nb NameBuilder) Minus(rightOperand OperandBuilder) SetValueBuilder { return Minus(nb, rightOperand) } // Minus creates a SetValueBuilder to be used in as an argument to Set(). The // arguments can either be NameBuilders or ValueBuilders. Minus() only supports // DynamoDB Number types, so the ValueBuilder must be a Number and the // NameBuilder must specify an item attribute of type Number. 
// More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.IncrementAndDecrement // // Example: // // // Use Minus() to set the value of item attribute "someName" to 5 - 10 // update, err := expression.Set(expression.Name("someName"), expression.Value(5).Minus(expression.Value(10))) // // Expression Equivalent: // // expression.Value(5).Minus(expression.Value(10)) // // let :five and :ten be ExpressionAttributeValues for the values 5 and // // 10 respectively. // ":five - :ten" func (vb ValueBuilder) Minus(rightOperand OperandBuilder) SetValueBuilder { return Minus(vb, rightOperand) } // ListAppend creates a SetValueBuilder to be used in as an argument to Set(). // The arguments can either be NameBuilders or ValueBuilders. ListAppend() only // supports DynamoDB List types, so the ValueBuilder must be a List and the // NameBuilder must specify an item attribute of type List. // More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.UpdatingListElements // // Example: // // // Use ListAppend() to set item attribute "someName" to the item // // attribute "nameOfList" with "some" and "list" appended to it // update, err := expression.Set(expression.Name("someName"), expression.ListAppend(expression.Name("nameOfList"), expression.Value([]string{"some", "list"}))) // // Expression Equivalent: // // expression.ListAppend(expression.Name("nameOfList"), expression.Value([]string{"some", "list"}) // // let :list be a ExpressionAttributeValue representing the list // // containing "some" and "list". // "list_append (nameOfList, :list)" func ListAppend(leftOperand, rightOperand OperandBuilder) SetValueBuilder { return SetValueBuilder{ leftOperand: leftOperand, rightOperand: rightOperand, mode: listAppendValueMode, } } // ListAppend creates a SetValueBuilder to be used in as an argument to Set(). // The arguments can either be NameBuilders or ValueBuilders. ListAppend() only // supports DynamoDB List types, so the ValueBuilder must be a List and the // NameBuilder must specify an item attribute of type List. // More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.UpdatingListElements // // Example: // // // Use ListAppend() to set item attribute "someName" to the item // // attribute "nameOfList" with "some" and "list" appended to it // update, err := expression.Set(expression.Name("someName"), expression.Name("nameOfList").ListAppend(expression.Value([]string{"some", "list"}))) // // Expression Equivalent: // // expression.Name("nameOfList").ListAppend(expression.Value([]string{"some", "list"}) // // let :list be a ExpressionAttributeValue representing the list // // containing "some" and "list". // "list_append (nameOfList, :list)" func (nb NameBuilder) ListAppend(rightOperand OperandBuilder) SetValueBuilder { return ListAppend(nb, rightOperand) } // ListAppend creates a SetValueBuilder to be used in as an argument to Set(). // The arguments can either be NameBuilders or ValueBuilders. ListAppend() only // supports DynamoDB List types, so the ValueBuilder must be a List and the // NameBuilder must specify an item attribute of type List. 
// More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.UpdatingListElements // // Example: // // // Use ListAppend() to set item attribute "someName" to a string list // // equal to {"a", "list", "some", "list"} // update, err := expression.Set(expression.Name("someName"), expression.Value([]string{"a", "list"}).ListAppend(expression.Value([]string{"some", "list"}))) // // Expression Equivalent: // // expression.Name([]string{"a", "list"}).ListAppend(expression.Value([]string{"some", "list"}) // // let :list1 and :list2 be a ExpressionAttributeValue representing the // // list {"a", "list"} and {"some", "list"} respectively // "list_append (:list1, :list2)" func (vb ValueBuilder) ListAppend(rightOperand OperandBuilder) SetValueBuilder { return ListAppend(vb, rightOperand) } // IfNotExists creates a SetValueBuilder to be used in as an argument to Set(). // The first argument must be a NameBuilder representing the name where the new // item attribute is created. The second argument can either be a NameBuilder or // a ValueBuilder. In the case that it is a NameBuilder, the value of the item // attribute at the name specified becomes the value of the new item attribute. // More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.PreventingAttributeOverwrites // // Example: // // // Use IfNotExists() to set item attribute "someName" to value 5 if // // "someName" does not exist yet. (Prevents overwrite) // update, err := expression.Set(expression.Name("someName"), expression.IfNotExists(expression.Name("someName"), expression.Value(5))) // // Expression Equivalent: // // expression.IfNotExists(expression.Name("someName"), expression.Value(5)) // // let :five be a ExpressionAttributeValue representing the value 5 // "if_not_exists (someName, :five)" func IfNotExists(name NameBuilder, setValue OperandBuilder) SetValueBuilder { return SetValueBuilder{ leftOperand: name, rightOperand: setValue, mode: ifNotExistsValueMode, } } // IfNotExists creates a SetValueBuilder to be used in as an argument to Set(). // The first argument must be a NameBuilder representing the name where the new // item attribute is created. The second argument can either be a NameBuilder or // a ValueBuilder. In the case that it is a NameBuilder, the value of the item // attribute at the name specified becomes the value of the new item attribute. // More information: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.PreventingAttributeOverwrites // // Example: // // // Use IfNotExists() to set item attribute "someName" to value 5 if // // "someName" does not exist yet. (Prevents overwrite) // update, err := expression.Set(expression.Name("someName"), expression.Name("someName").IfNotExists(expression.Value(5))) // // Expression Equivalent: // // expression.Name("someName").IfNotExists(expression.Value(5)) // // let :five be a ExpressionAttributeValue representing the value 5 // "if_not_exists (someName, :five)" func (nb NameBuilder) IfNotExists(rightOperand OperandBuilder) SetValueBuilder { return IfNotExists(nb, rightOperand) } // BuildOperand creates an Operand struct which are building blocks to DynamoDB // Expressions. Package methods and functions can establish relationships // between operands, representing DynamoDB Expressions. 
The method // BuildOperand() is called recursively when the Build() method on the type // Builder is called. BuildOperand() should never be called externally. // BuildOperand() aliases all strings to avoid stepping over DynamoDB's reserved // words. // More information on reserved words at http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html func (nb NameBuilder) BuildOperand() (Operand, error) { if nb.name == "" { return Operand{}, newUnsetParameterError("BuildOperand", "NameBuilder") } node := exprNode{ names: []string{}, } nameSplit := strings.Split(nb.name, ".") fmtNames := make([]string, 0, len(nameSplit)) for _, word := range nameSplit { var substr string if word == "" { return Operand{}, newInvalidParameterError("BuildOperand", "NameBuilder") } if word[len(word)-1] == ']' { for j, char := range word { if char == '[' { substr = word[j:] word = word[:j] break } } } if word == "" { return Operand{}, newInvalidParameterError("BuildOperand", "NameBuilder") } // Create a string with special characters that can be substituted later: $p node.names = append(node.names, word) fmtNames = append(fmtNames, "$n"+substr) } node.fmtExpr = strings.Join(fmtNames, ".") return Operand{ exprNode: node, }, nil } // BuildOperand creates an Operand struct which are building blocks to DynamoDB // Expressions. Package methods and functions can establish relationships // between operands, representing DynamoDB Expressions. The method // BuildOperand() is called recursively when the Build() method on the type // Builder is called. BuildOperand() should never be called externally. // BuildOperand() aliases all strings to avoid stepping over DynamoDB's reserved // words. // More information on reserved words at http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html func (vb ValueBuilder) BuildOperand() (Operand, error) { expr, err := dynamodbattribute.Marshal(vb.value) if err != nil { return Operand{}, newInvalidParameterError("BuildOperand", "ValueBuilder") } // Create a string with special characters that can be substituted later: $v operand := Operand{ exprNode: exprNode{ values: []dynamodb.AttributeValue{*expr}, fmtExpr: "$v", }, } return operand, nil } // BuildOperand creates an Operand struct which are building blocks to DynamoDB // Expressions. Package methods and functions can establish relationships // between operands, representing DynamoDB Expressions. The method // BuildOperand() is called recursively when the Build() method on the type // Builder is called. BuildOperand() should never be called externally. // BuildOperand() aliases all strings to avoid stepping over DynamoDB's reserved // words. // More information on reserved words at http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html func (sb SizeBuilder) BuildOperand() (Operand, error) { operand, err := sb.nameBuilder.BuildOperand() operand.exprNode.fmtExpr = "size (" + operand.exprNode.fmtExpr + ")" return operand, err } // BuildOperand creates an Operand struct which are building blocks to DynamoDB // Expressions. Package methods and functions can establish relationships // between operands, representing DynamoDB Expressions. The method // BuildOperand() is called recursively when the Build() method on the type // Builder is called. BuildOperand() should never be called externally. // BuildOperand() aliases all strings to avoid stepping over DynamoDB's reserved // words. 
// More information on reserved words at http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html func (kb KeyBuilder) BuildOperand() (Operand, error) { if kb.key == "" { return Operand{}, newUnsetParameterError("BuildOperand", "KeyBuilder") } ret := Operand{ exprNode: exprNode{ names: []string{kb.key}, fmtExpr: "$n", }, } return ret, nil } // BuildOperand creates an Operand struct which are building blocks to DynamoDB // Expressions. Package methods and functions can establish relationships // between operands, representing DynamoDB Expressions. The method // BuildOperand() is called recursively when the Build() method on the type // Builder is called. BuildOperand() should never be called externally. // BuildOperand() aliases all strings to avoid stepping over DynamoDB's reserved // words. // More information on reserved words at http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html func (svb SetValueBuilder) BuildOperand() (Operand, error) { if svb.mode == unsetValue { return Operand{}, newUnsetParameterError("BuildOperand", "SetValueBuilder") } left, err := svb.leftOperand.BuildOperand() if err != nil { return Operand{}, err } leftNode := left.exprNode right, err := svb.rightOperand.BuildOperand() if err != nil { return Operand{}, err } rightNode := right.exprNode node := exprNode{ children: []exprNode{leftNode, rightNode}, } switch svb.mode { case plusValueMode: node.fmtExpr = "$c + $c" case minusValueMode: node.fmtExpr = "$c - $c" case listAppendValueMode: node.fmtExpr = "list_append($c, $c)" case ifNotExistsValueMode: node.fmtExpr = "if_not_exists($c, $c)" default: return Operand{}, fmt.Errorf("build operand error: unsupported mode: %v", svb.mode) } return Operand{ exprNode: node, }, nil }
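The operand builders above only describe a tree of names and values; the `$n`/`$v` placeholders emitted by BuildOperand() are resolved into `#`/`:` aliases when the top-level expression Builder is built. A short usage sketch follows, mirroring the package's documented usage; the attribute names and literal values are illustrative only:

```go
// Build an update expression from the operand builders defined above.
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/service/dynamodb/expression"
)

func main() {
	update := expression.Set(
		// views = views + 1
		expression.Name("views"), expression.Name("views").Plus(expression.Value(1)),
	).Set(
		// tags = list_append(tags, :newTags)
		expression.Name("tags"), expression.Name("tags").ListAppend(expression.Value([]string{"new"})),
	).Set(
		// createdAt = if_not_exists(createdAt, :now)
		expression.Name("createdAt"), expression.Name("createdAt").IfNotExists(expression.Value("2020-01-01")),
	)

	expr, err := expression.NewBuilder().WithUpdate(update).Build()
	if err != nil {
		panic(err)
	}

	// Update() holds the aliased expression string; Names() and Values()
	// hold the substitution maps to pass alongside an UpdateItem call.
	fmt.Println(*expr.Update())
	fmt.Println(expr.Names())
	fmt.Println(expr.Values())
}
```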
{ "pile_set_name": "Github" }
# Arb Arb is a C library for arbitrary-precision interval arithmetic. It has full support for both real and complex numbers. The library is thread-safe, portable, and extensively tested. Arb is free software distributed under the GNU Lesser General Public License (LGPL), version 2.1 or later. ![arb logo](http://fredrikj.net/blog/2015/01/arb-2-5-0-released/arbtext.png) Documentation: http://arblib.org Development updates: http://fredrikj.net/blog/ Author: Fredrik Johansson <[email protected]> Bug reports, feature requests and other comments are welcome in private communication, on the GitHub issue tracker, or on the FLINT mailing list <[email protected]>. [![Build Status](https://travis-ci.org/fredrik-johansson/arb.svg?branch=master)](https://travis-ci.org/fredrik-johansson/arb) ## Code example The following program evaluates `sin(pi + exp(-10000))`. Since the input to the sine function matches a root to within 4343 digits, at least 4343-digit (14427-bit) precision is needed to get an accurate result. The program repeats the evaluation at 64-bit, 128-bit, ... precision, stopping only when the result is accurate to at least 53 bits. #include "arb.h" int main() { slong prec; arb_t x, y; arb_init(x); arb_init(y); for (prec = 64; ; prec *= 2) { arb_const_pi(x, prec); arb_set_si(y, -10000); arb_exp(y, y, prec); arb_add(x, x, y, prec); arb_sin(y, x, prec); arb_printn(y, 15, 0); printf("\n"); if (arb_rel_accuracy_bits(y) >= 53) break; } arb_clear(x); arb_clear(y); flint_cleanup(); } The output is: [+/- 6.01e-19] [+/- 2.55e-38] [+/- 8.01e-77] [+/- 8.64e-154] [+/- 5.37e-308] [+/- 3.63e-616] [+/- 1.07e-1232] [+/- 9.27e-2466] [-1.13548386531474e-4343 +/- 3.91e-4358] Each line shows a rigorous enclosure of the exact value of the expression. The program demonstrates how the user can rely on Arb's automatic error bound tracking to get an output that is guaranteed to be accurate -- no error analysis needs to be done by the user. For more example programs, see: http://arblib.org/examples.html ## Features Besides basic arithmetic, Arb allows working with univariate polynomials, truncated power series, and matrices over both real and complex numbers. Basic linear algebra is supported, including matrix multiplication, determinant, inverse, nonsingular solving, matrix exponential, and computation of eigenvalues and eigenvectors. Support for polynomials and power series is quite extensive, including methods for composition, reversion, product trees, multipoint evaluation and interpolation, complex root isolation, and transcendental functions of power series. Other features include root isolation for real functions, rigorous numerical integration of complex functions, and discrete Fourier transforms (DFTs). ## Special functions Arb can compute a wide range of transcendental and special functions, including the gamma function, polygamma functions, Riemann zeta and Hurwitz zeta function, Dirichlet L-functions, polylogarithm, error function, Gauss hypergeometric function 2F1, confluent hypergeometric functions, Bessel functions, Airy functions, Legendre functions and other orthogonal polynomials, exponential and trigonometric integrals, incomplete gamma and beta functions, Jacobi theta functions, modular functions, Weierstrass elliptic functions, complete and incomplete elliptic integrals, arithmetic-geometric mean, Bernoulli numbers, partition function, Barnes G-function, Lambert W function. ## Speed Arb uses a midpoint-radius (ball) representation of real numbers. 
At high precision, this allows doing interval arithmetic without significant overhead compared to plain floating-point arithmetic. Various low-level optimizations have also been implemented to reduce overhead at precisions of just a few machine words. Most operations on polynomials and power series use asymptotically fast FFT multiplication based on FLINT. Similarly, most operations on large matrices take advantage of the fast integer matrix multiplication in FLINT. For basic arithmetic, Arb should generally be around as fast as MPFR (http://mpfr.org), though it can be a bit slower at low precision, and around twice as fast as MPFI (https://perso.ens-lyon.fr/nathalie.revol/software.html). Transcendental functions in Arb are quite well optimized and should generally be faster than any other arbitrary-precision software currently available. The following table compares the time in seconds to evaluate the Gauss hypergeometric function `2F1(1/2, 1/4, 1, z)` at the complex number `z = 5^(1/2) + 7^(1/2)i`, to a given number of decimal digits (Arb 2.8-git and mpmath 0.19 on an 1.90 GHz Intel i5-4300U, Mathematica 9.0 on a 3.07 GHz Intel Xeon X5675). | Digits | Mathematica | mpmath | Arb | | -------:|:------------|:-----------|:-----------| | 10 | 0.00066 | 0.00065 | 0.000071 | | 100 | 0.0039 | 0.0012 | 0.00048 | | 1000 | 0.23 | 1.2 | 0.0093 | | 10000 | 42.6 | 84 | 0.56 | ## Dependencies, installation, and interfaces Arb depends on FLINT (http://flintlib.org/), either GMP (http://gmplib.org) or MPIR (http://mpir.org), and MPFR (http://mpfr.org). See http://arblib.org/setup.html for instructions on building and installing Arb directly from the source code. Arb might also be available (or coming soon) as a package for your Linux distribution. SageMath (<http://sagemath.org/>) includes Arb as a standard package and contains a high-level Python interface. See the SageMath documentation for RealBallField (http://doc.sagemath.org/html/en/reference/rings_numerical/sage/rings/real_arb.html) and ComplexBallField (http://doc.sagemath.org/html/en/reference/rings_numerical/sage/rings/complex_arb.html). Nemo (<http://nemocas.org/>) is a computer algebra package for the Julia programming language which includes a high-level Julia interface to Arb. The Nemo installation script will create a local installation of Arb along with other dependencies. A standalone Python interface to FLINT and Arb is also available (<https://github.com/fredrik-johansson/python-flint>). A separate wrapper of transcendental functions for use with the C99 `complex double` type is available (<https://github.com/fredrik-johansson/arbcmath>). Other third-party wrappers include: * A Julia interface: https://github.com/JeffreySarnoff/ArbNumerics.jl * Another Julia interface: https://github.com/JuliaArbTypes/ArbFloats.jl * Java wrapper using JNA: https://github.com/crowlogic/arb/
{ "pile_set_name": "Github" }
<Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>netstandard2.0</TargetFramework> </PropertyGroup> </Project>
{ "pile_set_name": "Github" }
# Tenko parser test case - Path: tests/testcases/operator_precedent/sanity_canaries/div_and_lhs_xor_gte_min_lor_land_seq_or_pow.one.rev.md > :: operator precedent : sanity canaries > > ::> div and lhs xor gte min lor land seq or pow.one.rev > > Random ops to test for precedence ## Input `````js x0 / x1 & x2 << x3 ^ x4 >= x5 - x6 || x7 && x8 === x9 | x10 ** x ````` ## Output _Note: the whole output block is auto-generated. Manual changes will be overwritten!_ Below follow outputs in five parsing modes: sloppy, sloppy+annexb, strict script, module, module+annexb. Note that the output parts are auto-generated by the test runner to reflect actual result. ### Sloppy mode Parsed with script goal and as if the code did not start with strict mode header. ````` ast: { type: 'Program', loc:{start:{line:1,column:0},end:{line:1,column:64},source:''}, body: [ { type: 'ExpressionStatement', loc:{start:{line:1,column:0},end:{line:1,column:64},source:''}, expression: { type: 'LogicalExpression', loc:{start:{line:1,column:0},end:{line:1,column:64},source:''}, left: { type: 'BinaryExpression', loc:{start:{line:1,column:0},end:{line:1,column:34},source:''}, left: { type: 'BinaryExpression', loc:{start:{line:1,column:0},end:{line:1,column:18},source:''}, left: { type: 'BinaryExpression', loc:{start:{line:1,column:0},end:{line:1,column:7},source:''}, left: { type: 'Identifier', loc:{start:{line:1,column:0},end:{line:1,column:2},source:''}, name: 'x0' }, operator: '/', right: { type: 'Identifier', loc:{start:{line:1,column:5},end:{line:1,column:7},source:''}, name: 'x1' } }, operator: '&', right: { type: 'BinaryExpression', loc:{start:{line:1,column:10},end:{line:1,column:18},source:''}, left: { type: 'Identifier', loc:{start:{line:1,column:10},end:{line:1,column:12},source:''}, name: 'x2' }, operator: '<<', right: { type: 'Identifier', loc:{start:{line:1,column:16},end:{line:1,column:18},source:''}, name: 'x3' } } }, operator: '^', right: { type: 'BinaryExpression', loc:{start:{line:1,column:21},end:{line:1,column:34},source:''}, left: { type: 'Identifier', loc:{start:{line:1,column:21},end:{line:1,column:23},source:''}, name: 'x4' }, operator: '>=', right: { type: 'BinaryExpression', loc:{start:{line:1,column:27},end:{line:1,column:34},source:''}, left: { type: 'Identifier', loc:{start:{line:1,column:27},end:{line:1,column:29},source:''}, name: 'x5' }, operator: '-', right: { type: 'Identifier', loc:{start:{line:1,column:32},end:{line:1,column:34},source:''}, name: 'x6' } } } }, operator: '||', right: { type: 'LogicalExpression', loc:{start:{line:1,column:38},end:{line:1,column:64},source:''}, left: { type: 'Identifier', loc:{start:{line:1,column:38},end:{line:1,column:40},source:''}, name: 'x7' }, operator: '&&', right: { type: 'BinaryExpression', loc:{start:{line:1,column:44},end:{line:1,column:64},source:''}, left: { type: 'BinaryExpression', loc:{start:{line:1,column:44},end:{line:1,column:53},source:''}, left: { type: 'Identifier', loc:{start:{line:1,column:44},end:{line:1,column:46},source:''}, name: 'x8' }, operator: '===', right: { type: 'Identifier', loc:{start:{line:1,column:51},end:{line:1,column:53},source:''}, name: 'x9' } }, operator: '|', right: { type: 'BinaryExpression', loc:{start:{line:1,column:56},end:{line:1,column:64},source:''}, left: { type: 'Identifier', loc:{start:{line:1,column:56},end:{line:1,column:59},source:''}, name: 'x10' }, operator: '**', right: { type: 'Identifier', loc:{start:{line:1,column:63},end:{line:1,column:64},source:''}, name: 'x' } } } } } } ] } tokens (25x): 
IDENT PUNC_DIV IDENT PUNC_AND IDENT PUNC_LT_LT IDENT PUNC_CARET IDENT PUNC_GT_EQ IDENT PUNC_MIN IDENT PUNC_OR_OR IDENT PUNC_AND_AND IDENT PUNC_EQ_EQ_EQ IDENT PUNC_OR IDENT PUNC_STAR_STAR IDENT ASI ````` ### Strict mode Parsed with script goal but as if it was starting with `"use strict"` at the top. _Output same as sloppy mode._ ### Module goal Parsed with the module goal. _Output same as sloppy mode._ ### Sloppy mode with AnnexB Parsed with script goal with AnnexB rules enabled and as if the code did not start with strict mode header. _Output same as sloppy mode._ ### Module goal with AnnexB Parsed with the module goal with AnnexB rules enabled. _Output same as sloppy mode._ ## AST Printer Printer output different from input [sloppy][annexb:no]: ````js ((((x0 / x1) & (x2 << x3)) ^ (x4 >= (x5 - x6))) || ((x7 && ((x8 === x9) | (x10 ** x))))); ```` Produces same AST
{ "pile_set_name": "Github" }
{ "url": "https://api.github.com/repos/hub4j-test-org/github-api/milestones/2", "html_url": "https://github.com/hub4j-test-org/github-api/milestone/2", "labels_url": "https://api.github.com/repos/hub4j-test-org/github-api/milestones/2/labels", "id": 4800108, "node_id": "MDk6TWlsZXN0b25lNDgwMDEwOA==", "number": 2, "title": "Updated Title", "description": "To test the update methods", "creator": { "login": "bitwiseman", "id": 1958953, "node_id": "MDQ6VXNlcjE5NTg5NTM=", "avatar_url": "https://avatars3.githubusercontent.com/u/1958953?v=4", "gravatar_id": "", "url": "https://api.github.com/users/bitwiseman", "html_url": "https://github.com/bitwiseman", "followers_url": "https://api.github.com/users/bitwiseman/followers", "following_url": "https://api.github.com/users/bitwiseman/following{/other_user}", "gists_url": "https://api.github.com/users/bitwiseman/gists{/gist_id}", "starred_url": "https://api.github.com/users/bitwiseman/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/bitwiseman/subscriptions", "organizations_url": "https://api.github.com/users/bitwiseman/orgs", "repos_url": "https://api.github.com/users/bitwiseman/repos", "events_url": "https://api.github.com/users/bitwiseman/events{/privacy}", "received_events_url": "https://api.github.com/users/bitwiseman/received_events", "type": "User", "site_admin": false }, "open_issues": 0, "closed_issues": 0, "state": "open", "created_at": "2019-10-30T22:04:20Z", "updated_at": "2019-10-30T22:04:21Z", "due_on": null, "closed_at": null }
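This snapshot has the shape of a response from GitHub's "update a milestone" REST endpoint (PATCH /repos/{owner}/{repo}/milestones/{number}), which the test suite exercises through the library. As a hedged sketch of the raw HTTP call only (not the library's own API; the GITHUB_TOKEN environment variable, the literal request body, and the panic-style error handling are assumptions for illustration):

```go
// Send a PATCH request updating a milestone's title and description.
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
	"os"
)

func main() {
	body := []byte(`{"title":"Updated Title","description":"To test the update methods"}`)
	url := "https://api.github.com/repos/hub4j-test-org/github-api/milestones/2"

	req, err := http.NewRequest(http.MethodPatch, url, bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Accept", "application/vnd.github.v3+json")
	req.Header.Set("Authorization", "token "+os.Getenv("GITHUB_TOKEN"))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(out)) // milestone JSON like the snapshot above
}
```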
{ "pile_set_name": "Github" }
Microsoft Visual Studio Solution File, Format Version 10.00 # Visual Studio 2008 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Util", "Util_x64_vs90.vcproj", "{6FF56CDB-787A-4714-A28C-919003F9FA6C}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "TestSuite", "testsuite\TestSuite_x64_vs90.vcproj", "{E40E738C-447B-40F4-A878-EBA9A2459270}" ProjectSection(ProjectDependencies) = postProject {6FF56CDB-787A-4714-A28C-919003F9FA6C} = {6FF56CDB-787A-4714-A28C-919003F9FA6C} EndProjectSection EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution debug_shared|x64 = debug_shared|x64 release_shared|x64 = release_shared|x64 debug_static_mt|x64 = debug_static_mt|x64 release_static_mt|x64 = release_static_mt|x64 debug_static_md|x64 = debug_static_md|x64 release_static_md|x64 = release_static_md|x64 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_shared|x64.ActiveCfg = debug_shared|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_shared|x64.Build.0 = debug_shared|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_shared|x64.Deploy.0 = debug_shared|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_shared|x64.ActiveCfg = release_shared|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_shared|x64.Build.0 = release_shared|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_shared|x64.Deploy.0 = release_shared|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_static_mt|x64.ActiveCfg = debug_static_mt|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_static_mt|x64.Build.0 = debug_static_mt|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_static_mt|x64.Deploy.0 = debug_static_mt|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_static_mt|x64.ActiveCfg = release_static_mt|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_static_mt|x64.Build.0 = release_static_mt|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_static_mt|x64.Deploy.0 = release_static_mt|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_static_md|x64.ActiveCfg = debug_static_md|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_static_md|x64.Build.0 = debug_static_md|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.debug_static_md|x64.Deploy.0 = debug_static_md|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_static_md|x64.ActiveCfg = release_static_md|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_static_md|x64.Build.0 = release_static_md|x64 {6FF56CDB-787A-4714-A28C-919003F9FA6C}.release_static_md|x64.Deploy.0 = release_static_md|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_shared|x64.ActiveCfg = debug_shared|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_shared|x64.Build.0 = debug_shared|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_shared|x64.Deploy.0 = debug_shared|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.release_shared|x64.ActiveCfg = release_shared|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.release_shared|x64.Build.0 = release_shared|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.release_shared|x64.Deploy.0 = release_shared|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_static_mt|x64.ActiveCfg = debug_static_mt|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_static_mt|x64.Build.0 = debug_static_mt|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_static_mt|x64.Deploy.0 = debug_static_mt|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.release_static_mt|x64.ActiveCfg = release_static_mt|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.release_static_mt|x64.Build.0 = release_static_mt|x64 
{E40E738C-447B-40F4-A878-EBA9A2459270}.release_static_mt|x64.Deploy.0 = release_static_mt|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_static_md|x64.ActiveCfg = debug_static_md|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_static_md|x64.Build.0 = debug_static_md|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.debug_static_md|x64.Deploy.0 = debug_static_md|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.release_static_md|x64.ActiveCfg = release_static_md|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.release_static_md|x64.Build.0 = release_static_md|x64 {E40E738C-447B-40F4-A878-EBA9A2459270}.release_static_md|x64.Deploy.0 = release_static_md|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection EndGlobal
{ "pile_set_name": "Github" }
/* * Copyright © 2017 Google, Inc. * * This is part of HarfBuzz, a text shaping library. * * Permission is hereby granted, without written agreement and without * license or royalty fees, to use, copy, modify, and distribute this * software and its documentation for any purpose, provided that the * above copyright notice and the following two paragraphs appear in * all copies of this software. * * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH * DAMAGE. * * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. * * Google Author(s): Behdad Esfahbod */ #ifndef HB_AAT_LAYOUT_MORX_TABLE_HH #define HB_AAT_LAYOUT_MORX_TABLE_HH #include "hb-open-type.hh" #include "hb-aat-layout-common.hh" #include "hb-ot-layout-common.hh" #include "hb-aat-map.hh" /* * morx -- Extended Glyph Metamorphosis * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6morx.html * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6mort.html */ #define HB_AAT_TAG_morx HB_TAG('m','o','r','x') #define HB_AAT_TAG_mort HB_TAG('m','o','r','t') namespace AAT { using namespace OT; template <typename Types> struct RearrangementSubtable { typedef typename Types::HBUINT HBUINT; typedef void EntryData; struct driver_context_t { static constexpr bool in_place = true; enum Flags { MarkFirst = 0x8000, /* If set, make the current glyph the first * glyph to be rearranged. */ DontAdvance = 0x4000, /* If set, don't advance to the next glyph * before going to the new state. This means * that the glyph index doesn't change, even * if the glyph at that index has changed. */ MarkLast = 0x2000, /* If set, make the current glyph the last * glyph to be rearranged. */ Reserved = 0x1FF0, /* These bits are reserved and should be set to 0. */ Verb = 0x000F, /* The type of rearrangement specified. */ }; driver_context_t (const RearrangementSubtable *table HB_UNUSED) : ret (false), start (0), end (0) {} bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED, const Entry<EntryData> &entry) { return (entry.flags & Verb) && start < end; } void transition (StateTableDriver<Types, EntryData> *driver, const Entry<EntryData> &entry) { hb_buffer_t *buffer = driver->buffer; unsigned int flags = entry.flags; if (flags & MarkFirst) start = buffer->idx; if (flags & MarkLast) end = hb_min (buffer->idx + 1, buffer->len); if ((flags & Verb) && start < end) { /* The following map has two nibbles, for start-side * and end-side. Values of 0,1,2 mean move that many * to the other side. Value of 3 means move 2 and * flip them. 
*/ const unsigned char map[16] = { 0x00, /* 0 no change */ 0x10, /* 1 Ax => xA */ 0x01, /* 2 xD => Dx */ 0x11, /* 3 AxD => DxA */ 0x20, /* 4 ABx => xAB */ 0x30, /* 5 ABx => xBA */ 0x02, /* 6 xCD => CDx */ 0x03, /* 7 xCD => DCx */ 0x12, /* 8 AxCD => CDxA */ 0x13, /* 9 AxCD => DCxA */ 0x21, /* 10 ABxD => DxAB */ 0x31, /* 11 ABxD => DxBA */ 0x22, /* 12 ABxCD => CDxAB */ 0x32, /* 13 ABxCD => CDxBA */ 0x23, /* 14 ABxCD => DCxAB */ 0x33, /* 15 ABxCD => DCxBA */ }; unsigned int m = map[flags & Verb]; unsigned int l = hb_min (2u, m >> 4); unsigned int r = hb_min (2u, m & 0x0F); bool reverse_l = 3 == (m >> 4); bool reverse_r = 3 == (m & 0x0F); if (end - start >= l + r) { buffer->merge_clusters (start, hb_min (buffer->idx + 1, buffer->len)); buffer->merge_clusters (start, end); hb_glyph_info_t *info = buffer->info; hb_glyph_info_t buf[4]; memcpy (buf, info + start, l * sizeof (buf[0])); memcpy (buf + 2, info + end - r, r * sizeof (buf[0])); if (l != r) memmove (info + start + r, info + start + l, (end - start - l - r) * sizeof (buf[0])); memcpy (info + start, buf + 2, r * sizeof (buf[0])); memcpy (info + end - l, buf, l * sizeof (buf[0])); if (reverse_l) { buf[0] = info[end - 1]; info[end - 1] = info[end - 2]; info[end - 2] = buf[0]; } if (reverse_r) { buf[0] = info[start]; info[start] = info[start + 1]; info[start + 1] = buf[0]; } } } } public: bool ret; private: unsigned int start; unsigned int end; }; bool apply (hb_aat_apply_context_t *c) const { TRACE_APPLY (this); driver_context_t dc (this); StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->face); driver.drive (&dc); return_trace (dc.ret); } bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); return_trace (machine.sanitize (c)); } protected: StateTable<Types, EntryData> machine; public: DEFINE_SIZE_STATIC (16); }; template <typename Types> struct ContextualSubtable { typedef typename Types::HBUINT HBUINT; struct EntryData { HBUINT16 markIndex; /* Index of the substitution table for the * marked glyph (use 0xFFFF for none). */ HBUINT16 currentIndex; /* Index of the substitution table for the * current glyph (use 0xFFFF for none). */ public: DEFINE_SIZE_STATIC (4); }; struct driver_context_t { static constexpr bool in_place = true; enum Flags { SetMark = 0x8000, /* If set, make the current glyph the marked glyph. */ DontAdvance = 0x4000, /* If set, don't advance to the next glyph before * going to the new state. */ Reserved = 0x3FFF, /* These bits are reserved and should be set to 0. */ }; driver_context_t (const ContextualSubtable *table_, hb_aat_apply_context_t *c_) : ret (false), c (c_), mark_set (false), mark (0), table (table_), subs (table+table->substitutionTables) {} bool is_actionable (StateTableDriver<Types, EntryData> *driver, const Entry<EntryData> &entry) { hb_buffer_t *buffer = driver->buffer; if (buffer->idx == buffer->len && !mark_set) return false; return entry.data.markIndex != 0xFFFF || entry.data.currentIndex != 0xFFFF; } void transition (StateTableDriver<Types, EntryData> *driver, const Entry<EntryData> &entry) { hb_buffer_t *buffer = driver->buffer; /* Looks like CoreText applies neither mark nor current substitution for * end-of-text if mark was not explicitly set. 
*/ if (buffer->idx == buffer->len && !mark_set) return; const HBGlyphID *replacement; replacement = nullptr; if (Types::extended) { if (entry.data.markIndex != 0xFFFF) { const Lookup<HBGlyphID> &lookup = subs[entry.data.markIndex]; replacement = lookup.get_value (buffer->info[mark].codepoint, driver->num_glyphs); } } else { unsigned int offset = entry.data.markIndex + buffer->info[mark].codepoint; const UnsizedArrayOf<HBGlyphID> &subs_old = (const UnsizedArrayOf<HBGlyphID> &) subs; replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)]; if (!replacement->sanitize (&c->sanitizer) || !*replacement) replacement = nullptr; } if (replacement) { buffer->unsafe_to_break (mark, hb_min (buffer->idx + 1, buffer->len)); buffer->info[mark].codepoint = *replacement; ret = true; } replacement = nullptr; unsigned int idx = hb_min (buffer->idx, buffer->len - 1); if (Types::extended) { if (entry.data.currentIndex != 0xFFFF) { const Lookup<HBGlyphID> &lookup = subs[entry.data.currentIndex]; replacement = lookup.get_value (buffer->info[idx].codepoint, driver->num_glyphs); } } else { unsigned int offset = entry.data.currentIndex + buffer->info[idx].codepoint; const UnsizedArrayOf<HBGlyphID> &subs_old = (const UnsizedArrayOf<HBGlyphID> &) subs; replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)]; if (!replacement->sanitize (&c->sanitizer) || !*replacement) replacement = nullptr; } if (replacement) { buffer->info[idx].codepoint = *replacement; ret = true; } if (entry.flags & SetMark) { mark_set = true; mark = buffer->idx; } } public: bool ret; private: hb_aat_apply_context_t *c; bool mark_set; unsigned int mark; const ContextualSubtable *table; const UnsizedOffsetListOf<Lookup<HBGlyphID>, HBUINT, false> &subs; }; bool apply (hb_aat_apply_context_t *c) const { TRACE_APPLY (this); driver_context_t dc (this, c); StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->face); driver.drive (&dc); return_trace (dc.ret); } bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); unsigned int num_entries = 0; if (unlikely (!machine.sanitize (c, &num_entries))) return_trace (false); if (!Types::extended) return_trace (substitutionTables.sanitize (c, this, 0)); unsigned int num_lookups = 0; const Entry<EntryData> *entries = machine.get_entries (); for (unsigned int i = 0; i < num_entries; i++) { const EntryData &data = entries[i].data; if (data.markIndex != 0xFFFF) num_lookups = hb_max (num_lookups, 1 + data.markIndex); if (data.currentIndex != 0xFFFF) num_lookups = hb_max (num_lookups, 1 + data.currentIndex); } return_trace (substitutionTables.sanitize (c, this, num_lookups)); } protected: StateTable<Types, EntryData> machine; NNOffsetTo<UnsizedOffsetListOf<Lookup<HBGlyphID>, HBUINT, false>, HBUINT> substitutionTables; public: DEFINE_SIZE_STATIC (20); }; template <bool extended> struct LigatureEntry; template <> struct LigatureEntry<true> { enum Flags { SetComponent = 0x8000, /* Push this glyph onto the component stack for * eventual processing. */ DontAdvance = 0x4000, /* Leave the glyph pointer at this glyph for the next iteration. */ PerformAction = 0x2000, /* Use the ligActionIndex to process a ligature * group. */ Reserved = 0x1FFF, /* These bits are reserved and should be set to 0. */ }; struct EntryData { HBUINT16 ligActionIndex; /* Index to the first ligActionTable entry * for processing this group, if indicated * by the flags. 
*/ public: DEFINE_SIZE_STATIC (2); }; static bool performAction (const Entry<EntryData> &entry) { return entry.flags & PerformAction; } static unsigned int ligActionIndex (const Entry<EntryData> &entry) { return entry.data.ligActionIndex; } }; template <> struct LigatureEntry<false> { enum Flags { SetComponent = 0x8000, /* Push this glyph onto the component stack for * eventual processing. */ DontAdvance = 0x4000, /* Leave the glyph pointer at this glyph for the next iteration. */ Offset = 0x3FFF, /* Byte offset from beginning of subtable to the * ligature action list. This value must be a * multiple of 4. */ }; typedef void EntryData; static bool performAction (const Entry<EntryData> &entry) { return entry.flags & Offset; } static unsigned int ligActionIndex (const Entry<EntryData> &entry) { return entry.flags & Offset; } }; template <typename Types> struct LigatureSubtable { typedef typename Types::HBUINT HBUINT; typedef LigatureEntry<Types::extended> LigatureEntryT; typedef typename LigatureEntryT::EntryData EntryData; struct driver_context_t { static constexpr bool in_place = false; enum { DontAdvance = LigatureEntryT::DontAdvance, }; enum LigActionFlags { LigActionLast = 0x80000000, /* This is the last action in the list. This also * implies storage. */ LigActionStore = 0x40000000, /* Store the ligature at the current cumulated index * in the ligature table in place of the marked * (i.e. currently-popped) glyph. */ LigActionOffset = 0x3FFFFFFF, /* A 30-bit value which is sign-extended to 32-bits * and added to the glyph ID, resulting in an index * into the component table. */ }; driver_context_t (const LigatureSubtable *table_, hb_aat_apply_context_t *c_) : ret (false), c (c_), table (table_), ligAction (table+table->ligAction), component (table+table->component), ligature (table+table->ligature), match_length (0) {} bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED, const Entry<EntryData> &entry) { return LigatureEntryT::performAction (entry); } void transition (StateTableDriver<Types, EntryData> *driver, const Entry<EntryData> &entry) { hb_buffer_t *buffer = driver->buffer; DEBUG_MSG (APPLY, nullptr, "Ligature transition at %u", buffer->idx); if (entry.flags & LigatureEntryT::SetComponent) { /* Never mark same index twice, in case DontAdvance was used... */ if (match_length && match_positions[(match_length - 1u) % ARRAY_LENGTH (match_positions)] == buffer->out_len) match_length--; match_positions[match_length++ % ARRAY_LENGTH (match_positions)] = buffer->out_len; DEBUG_MSG (APPLY, nullptr, "Set component at %u", buffer->out_len); } if (LigatureEntryT::performAction (entry)) { DEBUG_MSG (APPLY, nullptr, "Perform action with %u", match_length); unsigned int end = buffer->out_len; if (unlikely (!match_length)) return; if (buffer->idx >= buffer->len) return; /* TODO Work on previous instead? */ unsigned int cursor = match_length; unsigned int action_idx = LigatureEntryT::ligActionIndex (entry); action_idx = Types::offsetToIndex (action_idx, table, ligAction.arrayZ); const HBUINT32 *actionData = &ligAction[action_idx]; unsigned int ligature_idx = 0; unsigned int action; do { if (unlikely (!cursor)) { /* Stack underflow. Clear the stack. 
*/ DEBUG_MSG (APPLY, nullptr, "Stack underflow"); match_length = 0; break; } DEBUG_MSG (APPLY, nullptr, "Moving to stack position %u", cursor - 1); buffer->move_to (match_positions[--cursor % ARRAY_LENGTH (match_positions)]); if (unlikely (!actionData->sanitize (&c->sanitizer))) break; action = *actionData; uint32_t uoffset = action & LigActionOffset; if (uoffset & 0x20000000) uoffset |= 0xC0000000; /* Sign-extend. */ int32_t offset = (int32_t) uoffset; unsigned int component_idx = buffer->cur().codepoint + offset; component_idx = Types::wordOffsetToIndex (component_idx, table, component.arrayZ); const HBUINT16 &componentData = component[component_idx]; if (unlikely (!componentData.sanitize (&c->sanitizer))) break; ligature_idx += componentData; DEBUG_MSG (APPLY, nullptr, "Action store %u last %u", bool (action & LigActionStore), bool (action & LigActionLast)); if (action & (LigActionStore | LigActionLast)) { ligature_idx = Types::offsetToIndex (ligature_idx, table, ligature.arrayZ); const HBGlyphID &ligatureData = ligature[ligature_idx]; if (unlikely (!ligatureData.sanitize (&c->sanitizer))) break; hb_codepoint_t lig = ligatureData; DEBUG_MSG (APPLY, nullptr, "Produced ligature %u", lig); buffer->replace_glyph (lig); unsigned int lig_end = match_positions[(match_length - 1u) % ARRAY_LENGTH (match_positions)] + 1u; /* Now go and delete all subsequent components. */ while (match_length - 1u > cursor) { DEBUG_MSG (APPLY, nullptr, "Skipping ligature component"); buffer->move_to (match_positions[--match_length % ARRAY_LENGTH (match_positions)]); buffer->replace_glyph (DELETED_GLYPH); } buffer->move_to (lig_end); buffer->merge_out_clusters (match_positions[cursor % ARRAY_LENGTH (match_positions)], buffer->out_len); } actionData++; } while (!(action & LigActionLast)); buffer->move_to (end); } } public: bool ret; private: hb_aat_apply_context_t *c; const LigatureSubtable *table; const UnsizedArrayOf<HBUINT32> &ligAction; const UnsizedArrayOf<HBUINT16> &component; const UnsizedArrayOf<HBGlyphID> &ligature; unsigned int match_length; unsigned int match_positions[HB_MAX_CONTEXT_LENGTH]; }; bool apply (hb_aat_apply_context_t *c) const { TRACE_APPLY (this); driver_context_t dc (this, c); StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->face); driver.drive (&dc); return_trace (dc.ret); } bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); /* The rest of array sanitizations are done at run-time. */ return_trace (c->check_struct (this) && machine.sanitize (c) && ligAction && component && ligature); } protected: StateTable<Types, EntryData> machine; NNOffsetTo<UnsizedArrayOf<HBUINT32>, HBUINT> ligAction; /* Offset to the ligature action table. */ NNOffsetTo<UnsizedArrayOf<HBUINT16>, HBUINT> component; /* Offset to the component table. */ NNOffsetTo<UnsizedArrayOf<HBGlyphID>, HBUINT> ligature; /* Offset to the actual ligature lists. 
*/ public: DEFINE_SIZE_STATIC (28); }; template <typename Types> struct NoncontextualSubtable { bool apply (hb_aat_apply_context_t *c) const { TRACE_APPLY (this); bool ret = false; unsigned int num_glyphs = c->face->get_num_glyphs (); hb_glyph_info_t *info = c->buffer->info; unsigned int count = c->buffer->len; for (unsigned int i = 0; i < count; i++) { const HBGlyphID *replacement = substitute.get_value (info[i].codepoint, num_glyphs); if (replacement) { info[i].codepoint = *replacement; ret = true; } } return_trace (ret); } bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); return_trace (substitute.sanitize (c)); } protected: Lookup<HBGlyphID> substitute; public: DEFINE_SIZE_MIN (2); }; template <typename Types> struct InsertionSubtable { typedef typename Types::HBUINT HBUINT; struct EntryData { HBUINT16 currentInsertIndex; /* Zero-based index into the insertion glyph table. * The number of glyphs to be inserted is contained * in the currentInsertCount field in the flags. * A value of 0xFFFF indicates no insertion is to * be done. */ HBUINT16 markedInsertIndex; /* Zero-based index into the insertion glyph table. * The number of glyphs to be inserted is contained * in the markedInsertCount field in the flags. * A value of 0xFFFF indicates no insertion is to * be done. */ public: DEFINE_SIZE_STATIC (4); }; struct driver_context_t { static constexpr bool in_place = false; enum Flags { SetMark = 0x8000, /* If set, mark the current glyph. */ DontAdvance = 0x4000, /* If set, don't advance to the next glyph before * going to the new state. This does not mean * that the glyph pointed to is the same one as * before. If you've made insertions immediately * downstream of the current glyph, the next glyph * processed would in fact be the first one * inserted. */ CurrentIsKashidaLike= 0x2000, /* If set, and the currentInsertList is nonzero, * then the specified glyph list will be inserted * as a kashida-like insertion, either before or * after the current glyph (depending on the state * of the currentInsertBefore flag). If clear, and * the currentInsertList is nonzero, then the * specified glyph list will be inserted as a * split-vowel-like insertion, either before or * after the current glyph (depending on the state * of the currentInsertBefore flag). */ MarkedIsKashidaLike= 0x1000, /* If set, and the markedInsertList is nonzero, * then the specified glyph list will be inserted * as a kashida-like insertion, either before or * after the marked glyph (depending on the state * of the markedInsertBefore flag). If clear, and * the markedInsertList is nonzero, then the * specified glyph list will be inserted as a * split-vowel-like insertion, either before or * after the marked glyph (depending on the state * of the markedInsertBefore flag). */ CurrentInsertBefore= 0x0800, /* If set, specifies that insertions are to be made * to the left of the current glyph. If clear, * they're made to the right of the current glyph. */ MarkedInsertBefore= 0x0400, /* If set, specifies that insertions are to be * made to the left of the marked glyph. If clear, * they're made to the right of the marked glyph. */ CurrentInsertCount= 0x3E0, /* This 5-bit field is treated as a count of the * number of glyphs to insert at the current * position. Since zero means no insertions, the * largest number of insertions at any given * current location is 31 glyphs. */ MarkedInsertCount= 0x001F, /* This 5-bit field is treated as a count of the * number of glyphs to insert at the marked * position. 
Since zero means no insertions, the * largest number of insertions at any given * marked location is 31 glyphs. */ }; driver_context_t (const InsertionSubtable *table, hb_aat_apply_context_t *c_) : ret (false), c (c_), mark (0), insertionAction (table+table->insertionAction) {} bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED, const Entry<EntryData> &entry) { return (entry.flags & (CurrentInsertCount | MarkedInsertCount)) && (entry.data.currentInsertIndex != 0xFFFF ||entry.data.markedInsertIndex != 0xFFFF); } void transition (StateTableDriver<Types, EntryData> *driver, const Entry<EntryData> &entry) { hb_buffer_t *buffer = driver->buffer; unsigned int flags = entry.flags; unsigned mark_loc = buffer->out_len; if (entry.data.markedInsertIndex != 0xFFFF) { unsigned int count = (flags & MarkedInsertCount); unsigned int start = entry.data.markedInsertIndex; const HBGlyphID *glyphs = &insertionAction[start]; if (unlikely (!c->sanitizer.check_array (glyphs, count))) count = 0; bool before = flags & MarkedInsertBefore; unsigned int end = buffer->out_len; buffer->move_to (mark); if (buffer->idx < buffer->len && !before) buffer->copy_glyph (); /* TODO We ignore KashidaLike setting. */ for (unsigned int i = 0; i < count; i++) buffer->output_glyph (glyphs[i]); if (buffer->idx < buffer->len && !before) buffer->skip_glyph (); buffer->move_to (end + count); buffer->unsafe_to_break_from_outbuffer (mark, hb_min (buffer->idx + 1, buffer->len)); } if (flags & SetMark) mark = mark_loc; if (entry.data.currentInsertIndex != 0xFFFF) { unsigned int count = (flags & CurrentInsertCount) >> 5; unsigned int start = entry.data.currentInsertIndex; const HBGlyphID *glyphs = &insertionAction[start]; if (unlikely (!c->sanitizer.check_array (glyphs, count))) count = 0; bool before = flags & CurrentInsertBefore; unsigned int end = buffer->out_len; if (buffer->idx < buffer->len && !before) buffer->copy_glyph (); /* TODO We ignore KashidaLike setting. */ for (unsigned int i = 0; i < count; i++) buffer->output_glyph (glyphs[i]); if (buffer->idx < buffer->len && !before) buffer->skip_glyph (); /* Humm. Not sure where to move to. There's this wording under * DontAdvance flag: * * "If set, don't update the glyph index before going to the new state. * This does not mean that the glyph pointed to is the same one as * before. If you've made insertions immediately downstream of the * current glyph, the next glyph processed would in fact be the first * one inserted." * * This suggests that if DontAdvance is NOT set, we should move to * end+count. If it *was*, then move to end, such that newly inserted * glyphs are now visible. * * https://github.com/harfbuzz/harfbuzz/issues/1224#issuecomment-427691417 */ buffer->move_to ((flags & DontAdvance) ? end : end + count); } } public: bool ret; private: hb_aat_apply_context_t *c; unsigned int mark; const UnsizedArrayOf<HBGlyphID> &insertionAction; }; bool apply (hb_aat_apply_context_t *c) const { TRACE_APPLY (this); driver_context_t dc (this, c); StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->face); driver.drive (&dc); return_trace (dc.ret); } bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); /* The rest of array sanitizations are done at run-time. */ return_trace (c->check_struct (this) && machine.sanitize (c) && insertionAction); } protected: StateTable<Types, EntryData> machine; NNOffsetTo<UnsizedArrayOf<HBGlyphID>, HBUINT> insertionAction; /* Byte offset from stateHeader to the start of * the insertion glyph table. 
*/ public: DEFINE_SIZE_STATIC (20); }; struct Feature { bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); return_trace (c->check_struct (this)); } public: HBUINT16 featureType; /* The type of feature. */ HBUINT16 featureSetting; /* The feature's setting (aka selector). */ HBUINT32 enableFlags; /* Flags for the settings that this feature * and setting enables. */ HBUINT32 disableFlags; /* Complement of flags for the settings that this * feature and setting disable. */ public: DEFINE_SIZE_STATIC (12); }; template <typename Types> struct ChainSubtable { typedef typename Types::HBUINT HBUINT; template <typename T> friend struct Chain; unsigned int get_size () const { return length; } unsigned int get_type () const { return coverage & 0xFF; } unsigned int get_coverage () const { return coverage >> (sizeof (HBUINT) * 8 - 8); } enum Coverage { Vertical = 0x80, /* If set, this subtable will only be applied * to vertical text. If clear, this subtable * will only be applied to horizontal text. */ Backwards = 0x40, /* If set, this subtable will process glyphs * in descending order. If clear, it will * process the glyphs in ascending order. */ AllDirections = 0x20, /* If set, this subtable will be applied to * both horizontal and vertical text (i.e. * the state of bit 0x80000000 is ignored). */ Logical = 0x10, /* If set, this subtable will process glyphs * in logical order (or reverse logical order, * depending on the value of bit 0x80000000). */ }; enum Type { Rearrangement = 0, Contextual = 1, Ligature = 2, Noncontextual = 4, Insertion = 5 }; template <typename context_t, typename ...Ts> typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { unsigned int subtable_type = get_type (); TRACE_DISPATCH (this, subtable_type); switch (subtable_type) { case Rearrangement: return_trace (c->dispatch (u.rearrangement, hb_forward<Ts> (ds)...)); case Contextual: return_trace (c->dispatch (u.contextual, hb_forward<Ts> (ds)...)); case Ligature: return_trace (c->dispatch (u.ligature, hb_forward<Ts> (ds)...)); case Noncontextual: return_trace (c->dispatch (u.noncontextual, hb_forward<Ts> (ds)...)); case Insertion: return_trace (c->dispatch (u.insertion, hb_forward<Ts> (ds)...)); default: return_trace (c->default_return_value ()); } } bool apply (hb_aat_apply_context_t *c) const { TRACE_APPLY (this); hb_sanitize_with_object_t with (&c->sanitizer, this); return_trace (dispatch (c)); } bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); if (!length.sanitize (c) || length <= min_size || !c->check_range (this, length)) return_trace (false); hb_sanitize_with_object_t with (c, this); return_trace (dispatch (c)); } protected: HBUINT length; /* Total subtable length, including this header. */ HBUINT coverage; /* Coverage flags and subtable type. */ HBUINT32 subFeatureFlags;/* The 32-bit mask identifying which subtable this is. 
*/ union { RearrangementSubtable<Types> rearrangement; ContextualSubtable<Types> contextual; LigatureSubtable<Types> ligature; NoncontextualSubtable<Types> noncontextual; InsertionSubtable<Types> insertion; } u; public: DEFINE_SIZE_MIN (2 * sizeof (HBUINT) + 4); }; template <typename Types> struct Chain { typedef typename Types::HBUINT HBUINT; hb_mask_t compile_flags (const hb_aat_map_builder_t *map) const { hb_mask_t flags = defaultFlags; { unsigned int count = featureCount; for (unsigned i = 0; i < count; i++) { const Feature &feature = featureZ[i]; hb_aat_layout_feature_type_t type = (hb_aat_layout_feature_type_t) (unsigned int) feature.featureType; hb_aat_layout_feature_selector_t setting = (hb_aat_layout_feature_selector_t) (unsigned int) feature.featureSetting; retry: const hb_aat_map_builder_t::feature_info_t *info = map->features.bsearch (type); if (info && info->setting == setting) { flags &= feature.disableFlags; flags |= feature.enableFlags; } else if (type == HB_AAT_LAYOUT_FEATURE_TYPE_LETTER_CASE && setting == HB_AAT_LAYOUT_FEATURE_SELECTOR_SMALL_CAPS) { /* Deprecated. https://github.com/harfbuzz/harfbuzz/issues/1342 */ type = HB_AAT_LAYOUT_FEATURE_TYPE_LOWER_CASE; setting = HB_AAT_LAYOUT_FEATURE_SELECTOR_LOWER_CASE_SMALL_CAPS; goto retry; } } } return flags; } void apply (hb_aat_apply_context_t *c, hb_mask_t flags) const { const ChainSubtable<Types> *subtable = &StructAfter<ChainSubtable<Types>> (featureZ.as_array (featureCount)); unsigned int count = subtableCount; for (unsigned int i = 0; i < count; i++) { bool reverse; if (!(subtable->subFeatureFlags & flags)) goto skip; if (!(subtable->get_coverage() & ChainSubtable<Types>::AllDirections) && HB_DIRECTION_IS_VERTICAL (c->buffer->props.direction) != bool (subtable->get_coverage() & ChainSubtable<Types>::Vertical)) goto skip; /* Buffer contents is always in logical direction. Determine if * we need to reverse before applying this subtable. We reverse * back after if we did reverse indeed. * * Quoting the spac: * """ * Bits 28 and 30 of the coverage field control the order in which * glyphs are processed when the subtable is run by the layout engine. * Bit 28 is used to indicate if the glyph processing direction is * the same as logical order or layout order. Bit 30 is used to * indicate whether glyphs are processed forwards or backwards within * that order. Bit 30 Bit 28 Interpretation for Horizontal Text 0 0 The subtable is processed in layout order (the same order as the glyphs, which is always left-to-right). 1 0 The subtable is processed in reverse layout order (the order opposite that of the glyphs, which is always right-to-left). 0 1 The subtable is processed in logical order (the same order as the characters, which may be left-to-right or right-to-left). 1 1 The subtable is processed in reverse logical order (the order opposite that of the characters, which may be right-to-left or left-to-right). */ reverse = subtable->get_coverage () & ChainSubtable<Types>::Logical ? 
bool (subtable->get_coverage () & ChainSubtable<Types>::Backwards) : bool (subtable->get_coverage () & ChainSubtable<Types>::Backwards) != HB_DIRECTION_IS_BACKWARD (c->buffer->props.direction); if (!c->buffer->message (c->font, "start chain subtable %d", c->lookup_index)) goto skip; if (reverse) c->buffer->reverse (); subtable->apply (c); if (reverse) c->buffer->reverse (); (void) c->buffer->message (c->font, "end chain subtable %d", c->lookup_index); if (unlikely (!c->buffer->successful)) return; skip: subtable = &StructAfter<ChainSubtable<Types>> (*subtable); c->set_lookup_index (c->lookup_index + 1); } } unsigned int get_size () const { return length; } bool sanitize (hb_sanitize_context_t *c, unsigned int version HB_UNUSED) const { TRACE_SANITIZE (this); if (!length.sanitize (c) || length < min_size || !c->check_range (this, length)) return_trace (false); if (!c->check_array (featureZ.arrayZ, featureCount)) return_trace (false); const ChainSubtable<Types> *subtable = &StructAfter<ChainSubtable<Types>> (featureZ.as_array (featureCount)); unsigned int count = subtableCount; for (unsigned int i = 0; i < count; i++) { if (!subtable->sanitize (c)) return_trace (false); subtable = &StructAfter<ChainSubtable<Types>> (*subtable); } return_trace (true); } protected: HBUINT32 defaultFlags; /* The default specification for subtables. */ HBUINT32 length; /* Total byte count, including this header. */ HBUINT featureCount; /* Number of feature subtable entries. */ HBUINT subtableCount; /* The number of subtables in the chain. */ UnsizedArrayOf<Feature> featureZ; /* Features. */ /*ChainSubtable firstSubtable;*//* Subtables. */ /*subtableGlyphCoverageArray*/ /* Only if version >= 3. We don't use. */ public: DEFINE_SIZE_MIN (8 + 2 * sizeof (HBUINT)); }; /* * The 'mort'/'morx' Table */ template <typename Types, hb_tag_t TAG> struct mortmorx { static constexpr hb_tag_t tableTag = TAG; bool has_data () const { return version != 0; } void compile_flags (const hb_aat_map_builder_t *mapper, hb_aat_map_t *map) const { const Chain<Types> *chain = &firstChain; unsigned int count = chainCount; for (unsigned int i = 0; i < count; i++) { map->chain_flags.push (chain->compile_flags (mapper)); chain = &StructAfter<Chain<Types>> (*chain); } } void apply (hb_aat_apply_context_t *c) const { if (unlikely (!c->buffer->successful)) return; c->set_lookup_index (0); const Chain<Types> *chain = &firstChain; unsigned int count = chainCount; for (unsigned int i = 0; i < count; i++) { chain->apply (c, c->plan->aat_map.chain_flags[i]); if (unlikely (!c->buffer->successful)) return; chain = &StructAfter<Chain<Types>> (*chain); } } bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); if (!version.sanitize (c) || !version || !chainCount.sanitize (c)) return_trace (false); const Chain<Types> *chain = &firstChain; unsigned int count = chainCount; for (unsigned int i = 0; i < count; i++) { if (!chain->sanitize (c, version)) return_trace (false); chain = &StructAfter<Chain<Types>> (*chain); } return_trace (true); } protected: HBUINT16 version; /* Version number of the glyph metamorphosis table. * 1, 2, or 3. */ HBUINT16 unused; /* Set to 0. */ HBUINT32 chainCount; /* Number of metamorphosis chains contained in this * table. */ Chain<Types> firstChain; /* Chains. */ public: DEFINE_SIZE_MIN (8); }; struct morx : mortmorx<ExtendedTypes, HB_AAT_TAG_morx> {}; struct mort : mortmorx<ObsoleteTypes, HB_AAT_TAG_mort> {}; } /* namespace AAT */ #endif /* HB_AAT_LAYOUT_MORX_TABLE_HH */
{ "pile_set_name": "Github" }
-- SAI for Lord Thorval speech to disciples
SET @ENTRY = 29196;
UPDATE `creature_template` SET AIName = 'SmartAI' WHERE `entry`=@ENTRY;
-- DELETE FROM `creature_ai_scripts` WHERE `creature_id`=@ENTRY;
DELETE FROM `smart_scripts` WHERE `entryorguid`=@ENTRY AND `source_type`=0;
DELETE FROM `smart_scripts` WHERE `entryorguid`=@ENTRY*100 AND `source_type`=9;
INSERT INTO `smart_scripts` (`entryorguid`,`source_type`,`id`,`link`,`event_type`,`event_phase_mask`,`event_chance`,`event_flags`,`event_param1`,`event_param2`,`event_param3`,`event_param4`,`action_type`,`action_param1`,`action_param2`,`action_param3`,`action_param4`,`action_param5`,`action_param6`,`target_type`,`target_param1`,`target_param2`,`target_param3`,`target_x`,`target_y`,`target_z`,`target_o`,`comment`) VALUES
(@ENTRY,0,0,0,1,0,100,0,60000,60000,600000,600000,80,@ENTRY*100,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - OOC - Run Script'),
(@ENTRY*100,9,0,0,0,0,100,0,1000,1000,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 0'),
(@ENTRY*100,9,1,0,0,0,100,0,8000,8000,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 1'),
(@ENTRY*100,9,2,0,0,0,100,0,8000,8000,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 2'),
(@ENTRY*100,9,3,0,0,0,100,0,8000,8000,0,0,1,3,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 3'),
(@ENTRY*100,9,4,0,0,0,100,0,8000,8000,0,0,1,4,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 4'),
(@ENTRY*100,9,5,0,0,0,100,0,8000,8000,0,0,1,5,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 5'),
(@ENTRY*100,9,6,0,0,0,100,0,8000,8000,0,0,1,6,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 6'),
(@ENTRY*100,9,7,0,0,0,100,0,8000,8000,0,0,1,7,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 7'),
(@ENTRY*100,9,8,0,0,0,100,0,7000,7000,0,0,1,8,0,0,0,0,0,1,0,0,0,0,0,0,0,'Lord Thorval - Script - Say 8');

-- Creature text for Lord Thorval
-- DELETE FROM creature_ai_texts WHERE entry BETWEEN -735 AND -727;
DELETE FROM `creature_text` WHERE `entry`=@ENTRY;
INSERT INTO `creature_text` (`entry`,`groupid`,`id`,`text`,`type`,`language`,`probability`,`emote`,`duration`,`sound`,`comment`) VALUES
(@ENTRY, 0, 0, 'As disciples of blood, you strive to master the very lifeforce of your enemies.', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 1 of Speech To Disciples)'),
(@ENTRY, 1, 0, 'Be it by blade or incantation, blood feeds our attacks and weakens our foes.', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 2 of Speech To Disciples)'),
(@ENTRY, 2, 0, 'True masters learn to make blood serve more than just their strength in battle.', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 3 of Speech To Disciples)'),
(@ENTRY, 3, 0, 'Stripping energy from our foes, both fighting and fallen, allows us to persevere where lesser beings fall exhausted.', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 4 of Speech To Disciples)'),
(@ENTRY, 4, 0, 'And every foe that falls, energy sapped and stolen, only further fuels our assault.', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 5 of Speech To Disciples)'),
(@ENTRY, 5, 0, 'As masters of blood, we know battle without end...', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 6 of Speech To Disciples)'),
(@ENTRY, 6, 0, 'We know hunger never to be quenched...', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 7 of Speech To Disciples)'),
(@ENTRY, 7, 0, 'We know power never to be overcome...', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 8 of Speech To Disciples)'),
(@ENTRY, 8, 0, 'As masters of blood, we are masters of life and death itself. Against us, even hope falls drained and lifeless.', 12, 0, 100, 1, 0, 0, 'Lord Thorval - (Part 9 of Speech To Disciples)');

-- Remove some EAI already converted to cpp
/*
DELETE FROM creature_ai_scripts WHERE creature_id IN (25040,26499,30663,30918,32273);
UPDATE creature_template SET AIName = '' WHERE entry IN (25040,26499,30663,30918,32273);
*/
{ "pile_set_name": "Github" }
/*
 * Copyright (c) MuleSoft, Inc. All rights reserved. http://www.mulesoft.com
 * The software in this package is published under the terms of the CPAL v1.0
 * license, a copy of which has been included with this distribution in the
 * LICENSE.txt file.
 */
package org.mule.runtime.deployment.model.api.domain;

import java.util.Optional;
import java.util.Properties;
import java.util.Set;

import org.mule.runtime.deployment.model.api.DeployableArtifactDescriptor;

import com.google.common.collect.ImmutableSet;

/**
 * Represents the description of a domain.
 */
public class DomainDescriptor extends DeployableArtifactDescriptor {

  public static final String DEFAULT_DOMAIN_NAME = "default";
  public static final String DEFAULT_CONFIGURATION_RESOURCE = "mule-domain-config.xml";
  public static final String MULE_DOMAIN_CLASSIFIER = "mule-domain";

  /**
   * Creates a new domain descriptor
   *
   * @param name domain name. Non empty.
   */
  public DomainDescriptor(String name) {
    super(name);
  }

  /**
   * Creates a new domain descriptor
   *
   * @param name domain name. Non empty.
   * @param deploymentProperties deploymentProperties
   */
  public DomainDescriptor(String name, Optional<Properties> deploymentProperties) {
    super(name, deploymentProperties);
  }

  @Override
  protected Set<String> getDefaultConfigResources() {
    return ImmutableSet.<String>builder().add(DEFAULT_CONFIGURATION_RESOURCE).build();
  }
}
{ "pile_set_name": "Github" }
// Copyright 2005-2012 Daniel James. // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #if !defined(BOOST_FUNCTIONAL_HASH_DETAIL_HASH_FLOAT_HEADER) #define BOOST_FUNCTIONAL_HASH_DETAIL_HASH_FLOAT_HEADER #include <boost/config.hpp> #if defined(BOOST_HAS_PRAGMA_ONCE) #pragma once #endif #include <boost/functional/hash/detail/float_functions.hpp> #include <boost/functional/hash/detail/limits.hpp> #include <boost/utility/enable_if.hpp> #include <boost/integer/static_log2.hpp> #include <boost/cstdint.hpp> #include <boost/assert.hpp> #include <boost/limits.hpp> #include <cstring> #if defined(BOOST_MSVC) #pragma warning(push) #if BOOST_MSVC >= 1400 #pragma warning(disable:6294) // Ill-defined for-loop: initial condition does // not satisfy test. Loop body not executed #endif #endif // Can we use fpclassify? // STLport #if defined(__SGI_STL_PORT) || defined(_STLPORT_VERSION) #define BOOST_HASH_USE_FPCLASSIFY 0 // GNU libstdc++ 3 #elif defined(__GLIBCPP__) || defined(__GLIBCXX__) # if (defined(__USE_ISOC99) || defined(_GLIBCXX_USE_C99_MATH)) && \ !(defined(macintosh) || defined(__APPLE__) || defined(__APPLE_CC__)) # define BOOST_HASH_USE_FPCLASSIFY 1 # else # define BOOST_HASH_USE_FPCLASSIFY 0 # endif // Everything else #else # define BOOST_HASH_USE_FPCLASSIFY 0 #endif namespace boost { namespace hash_detail { inline void hash_float_combine(std::size_t& seed, std::size_t value) { seed ^= value + (seed<<6) + (seed>>2); } //////////////////////////////////////////////////////////////////////// // Binary hash function // // Only used for floats with known iec559 floats, and certain values in // numeric_limits inline std::size_t hash_binary(char* ptr, std::size_t length) { std::size_t seed = 0; if (length >= sizeof(std::size_t)) { std::memcpy(&seed, ptr, sizeof(std::size_t)); length -= sizeof(std::size_t); ptr += sizeof(std::size_t); while(length >= sizeof(std::size_t)) { std::size_t buffer = 0; std::memcpy(&buffer, ptr, sizeof(std::size_t)); hash_float_combine(seed, buffer); length -= sizeof(std::size_t); ptr += sizeof(std::size_t); } } if (length > 0) { std::size_t buffer = 0; std::memcpy(&buffer, ptr, length); hash_float_combine(seed, buffer); } return seed; } template <typename Float, unsigned digits, unsigned max_exponent> struct enable_binary_hash { BOOST_STATIC_CONSTANT(bool, value = std::numeric_limits<Float>::is_iec559 && std::numeric_limits<Float>::digits == digits && std::numeric_limits<Float>::radix == 2 && std::numeric_limits<Float>::max_exponent == max_exponent); }; template <typename Float> inline std::size_t float_hash_impl(Float v, BOOST_DEDUCED_TYPENAME boost::enable_if_c< enable_binary_hash<Float, 24, 128>::value, std::size_t>::type) { return hash_binary((char*) &v, 4); } template <typename Float> inline std::size_t float_hash_impl(Float v, BOOST_DEDUCED_TYPENAME boost::enable_if_c< enable_binary_hash<Float, 53, 1024>::value, std::size_t>::type) { return hash_binary((char*) &v, 8); } template <typename Float> inline std::size_t float_hash_impl(Float v, BOOST_DEDUCED_TYPENAME boost::enable_if_c< enable_binary_hash<Float, 64, 16384>::value, std::size_t>::type) { return hash_binary((char*) &v, 10); } template <typename Float> inline std::size_t float_hash_impl(Float v, BOOST_DEDUCED_TYPENAME boost::enable_if_c< enable_binary_hash<Float, 113, 16384>::value, std::size_t>::type) { return hash_binary((char*) &v, 16); } 
//////////////////////////////////////////////////////////////////////// // Portable hash function // // Used as a fallback when the binary hash function isn't supported. template <class T> inline std::size_t float_hash_impl2(T v) { boost::hash_detail::call_frexp<T> frexp; boost::hash_detail::call_ldexp<T> ldexp; int exp = 0; v = frexp(v, &exp); // A postive value is easier to hash, so combine the // sign with the exponent and use the absolute value. if(v < 0) { v = -v; exp += limits<T>::max_exponent - limits<T>::min_exponent; } v = ldexp(v, limits<std::size_t>::digits); std::size_t seed = static_cast<std::size_t>(v); v -= static_cast<T>(seed); // ceiling(digits(T) * log2(radix(T))/ digits(size_t)) - 1; std::size_t const length = (limits<T>::digits * boost::static_log2<limits<T>::radix>::value + limits<std::size_t>::digits - 1) / limits<std::size_t>::digits; for(std::size_t i = 0; i != length; ++i) { v = ldexp(v, limits<std::size_t>::digits); std::size_t part = static_cast<std::size_t>(v); v -= static_cast<T>(part); hash_float_combine(seed, part); } hash_float_combine(seed, exp); return seed; } #if !defined(BOOST_HASH_DETAIL_TEST_WITHOUT_GENERIC) template <class T> inline std::size_t float_hash_impl(T v, ...) { typedef BOOST_DEDUCED_TYPENAME select_hash_type<T>::type type; return float_hash_impl2(static_cast<type>(v)); } #endif } } #if BOOST_HASH_USE_FPCLASSIFY #include <boost/config/no_tr1/cmath.hpp> namespace boost { namespace hash_detail { template <class T> inline std::size_t float_hash_value(T v) { #if defined(fpclassify) switch (fpclassify(v)) #elif BOOST_HASH_CONFORMANT_FLOATS switch (std::fpclassify(v)) #else using namespace std; switch (fpclassify(v)) #endif { case FP_ZERO: return 0; case FP_INFINITE: return (std::size_t)(v > 0 ? -1 : -2); case FP_NAN: return (std::size_t)(-3); case FP_NORMAL: case FP_SUBNORMAL: return float_hash_impl(v, 0); default: BOOST_ASSERT(0); return 0; } } } } #else // !BOOST_HASH_USE_FPCLASSIFY namespace boost { namespace hash_detail { template <class T> inline bool is_zero(T v) { #if !defined(__GNUC__) return v == 0; #else // GCC's '-Wfloat-equal' will complain about comparing // v to 0, but because it disables warnings for system // headers it won't complain if you use std::equal_to to // compare with 0. Resulting in this silliness: return std::equal_to<T>()(v, 0); #endif } template <class T> inline std::size_t float_hash_value(T v) { return boost::hash_detail::is_zero(v) ? 0 : float_hash_impl(v, 0); } } } #endif // BOOST_HASH_USE_FPCLASSIFY #undef BOOST_HASH_USE_FPCLASSIFY #if defined(BOOST_MSVC) #pragma warning(pop) #endif #endif
{ "pile_set_name": "Github" }
/*--------------------------------------------------------------------------- THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE. Copyright (C) 1999 - 2000 Microsoft Corporation. All rights reserved. fcopy.cpp This program demonstrates how to use file mappings to implement a file-copy program. It can copy any size file on Windows NT. The size of the file mapping view is a multiple of the system's allocation size. With relatively large views, this program runs faster than if it used many small views. The size of the view can be adjusted up or down by changing the ALLOCATION_MULTIPLIER constant. The only recommendation is that the view size must be no more than can fit into the process's address space. Note: Supports 64-bit file systems. ---------------------------------------------------------------------------*/ #define WIN32_LEAN_AND_MEAN #include <windows.h> #include <stdio.h> #if defined (DEBUG) #define DEBUG_PRINT(X) printf(X) #else #define DEBUG_PRINT(X) #endif #include <crtdbg.h> // maximum view size DWORD dwMaxViewSize ; // multiplying the system allocation size by the following constant // determines the maximum view size const WORD ALLOCATION_MULTIPLIER = 1 ; const int SUCCESS = 0; /* for return value from main() */ const int FAILURE = 1; /* for return value from main() */ /*--------------------------------------------------------------------------- main (argc, argv) The main program. Takes the command line arguments, copies the source file to the destination file. Parameters argc Count of command-line arguments, including the name of the program. argv Array of pointers to strings that contain individual command-line arguments. Returns Zero if program executed successfully, non-zero otherwise. ---------------------------------------------------------------------------*/ int main (int argc, char **argv) { int fResult = FAILURE; SYSTEM_INFO siSystemInfo ; DWORD dwMaxViewSize ; ULARGE_INTEGER liSrcFileSize, liBytesRemaining, liMapSize, liOffset; HANDLE hSrcFile = INVALID_HANDLE_VALUE, hDstFile = INVALID_HANDLE_VALUE, hSrcMap = 0, hDstMap = 0; BYTE * pSrc = 0, * pDst = 0; char * pszSrcFileName = 0, * pszDstFileName = 0; if (argc != 3) { printf("usage: fcopy <srcfile> <dstfile>\n"); return (FAILURE); } pszSrcFileName = argv[argc-2]; // Src is second to last argument pszDstFileName = argv[argc-1]; // Dst is the last argument // Obtain the system's allocation granularity, then multiply it by an // arbitrary factor to obtain the maximum view size GetSystemInfo(&siSystemInfo); dwMaxViewSize = siSystemInfo.dwAllocationGranularity * ALLOCATION_MULTIPLIER; /* Steps to open and access a file's contents: 1) Open the file, 2) Create a mapping of the file, 3) Map a view of the file. This yields a pointer to the file's contents, which can then be used to access the file, just as if it's contents were in a memory buffer. For the source file, open and map it as read only; for the destination file, open and map it as read-write. We allow other processes to read the source file while we're copying it, but do not allow access to the destination file since we're writing it. 
*/ // Open the source and destination files hSrcFile = CreateFile (pszSrcFileName, GENERIC_READ, FILE_SHARE_READ, 0, OPEN_EXISTING, 0, 0); if (INVALID_HANDLE_VALUE == hSrcFile) { printf("fcopy: couldn't open source file.\n"); goto DONE; } hDstFile = CreateFile (pszDstFileName, GENERIC_READ|GENERIC_WRITE, 0, 0, CREATE_ALWAYS, 0, 0); if (INVALID_HANDLE_VALUE == hDstFile) { printf("fcopy: couldn't create destination file.\n"); goto DONE; } // Need source file's size to know how big to make the destination mapping. liSrcFileSize.LowPart = GetFileSize(hSrcFile, &liSrcFileSize.HighPart); if ( (-1 == liSrcFileSize.LowPart) && (GetLastError() != NO_ERROR) ) { DEBUG_PRINT("couldn't get size of source file.\n"); goto DONE; } /* Special case: If the source file is zero bytes, we don't map it because there's no need to and CreateFileMapping cannot map a zero-length file. But since we've created the destination, we've successfully "copied" the source. */ if (0 == liSrcFileSize.QuadPart) { fResult = SUCCESS; goto DONE; } /* Map the source and destination files. A mapping size of zero means the whole file will be mapped. */ hSrcMap = CreateFileMapping (hSrcFile, 0, PAGE_READONLY, 0, 0, 0); if (!hSrcMap) { DEBUG_PRINT("couldn't map source file\n"); goto DONE; } hDstMap = CreateFileMapping (hDstFile, 0, PAGE_READWRITE, liSrcFileSize.HighPart, liSrcFileSize.LowPart, 0); if (!hDstMap) { DEBUG_PRINT("couldn't map destination file.\n"); goto DONE; } /* Now that we have the source and destination mapping objects, map views of the source and destination files, and do the file copy. To minimize the amount of memory consumed for large files and make it possible to copy files that couldn't be mapped into our virtual address space entirely (those over 2GB), we limit the source and destination views to the smaller of the file size or a specified maximum view size (dwMaxViewSize, which is ALLOCATION_MULTIPLIER times the system's allocation size). If the file is smaller than the max view size, we'll just map and copy it. Otherwise, we'll map a portion of the file, copy it, then map the next portion, copy it, etc. until the entire file is copied. MAP_SIZE is 32 bits because MapViewOfFile requires a 32-bit value for the size of the view. This makes sense because a Win32 process's address space is 4GB, of which only 2GB (2^31) bytes may be used by the process. However, for the sake of making 64-bit arithmetic work below for file offets, we need to make sure that all 64 bits of liMapSize are initialized correctly. Note structured exception handling is used in case a MapViewOfFile call failed. That should never happen in this program, but in case it does, we should handle it. Since the possibility is so remote, it is faster to handle the exception when it occurs rather than test for failure in the loop. */ __try { liBytesRemaining.QuadPart = liSrcFileSize.QuadPart; // stan bug fix liMapSize.QuadPart = dwMaxViewSize; // Make sure that the arithmetic below is correct during debugging. 
_ASSERT(liMapSize.HighPart == 0); do { liMapSize.QuadPart = min(liBytesRemaining.QuadPart, liMapSize.QuadPart); liOffset.QuadPart = liSrcFileSize.QuadPart - liBytesRemaining.QuadPart; pSrc = (BYTE *)MapViewOfFile (hSrcMap, FILE_MAP_READ, liOffset.HighPart, liOffset.LowPart, liMapSize.LowPart); pDst = (BYTE *)MapViewOfFile (hDstMap, FILE_MAP_WRITE, liOffset.HighPart, liOffset.LowPart, liMapSize.LowPart); CopyMemory (pDst, pSrc, liMapSize.LowPart); UnmapViewOfFile (pSrc); UnmapViewOfFile (pDst); liBytesRemaining.QuadPart -= liMapSize.QuadPart; } while (liBytesRemaining.QuadPart > 0); fResult = SUCCESS; } __except (EXCEPTION_EXECUTE_HANDLER) { /* Only way we should get here is if a MapViewOfFile failed. That shouldn't happen, but in case it does clean up the resources. */ if (pSrc) UnmapViewOfFile (pSrc); if (pDst) UnmapViewOfFile (pDst); } DONE: /* Clean up all outstanding resources. Note views are already unmapped. */ if (hDstMap) CloseHandle (hDstMap); if (hDstFile != INVALID_HANDLE_VALUE) CloseHandle (hDstFile); if (hSrcMap) CloseHandle (hSrcMap); if (hSrcFile != INVALID_HANDLE_VALUE) CloseHandle (hSrcFile); // Report to user only if a problem occurred. if (fResult != SUCCESS) { printf("fcopy: copying failed.\n"); DeleteFile (pszDstFileName); } return (fResult); }
{ "pile_set_name": "Github" }
string = {} --[[ Returns the internal numerical codes of the characters s[i], s[i+1], ..., s[j]. The default value for i is 1; the default value for j is i. Note that numerical codes are not necessarily portable across platforms. --]] function string.byte( s, i, j ) return __std.Number end --[[ Receives zero or more integers. Returns a string with length equal to the number of arguments, in which each character has the internal numerical code equal to its corresponding argument. Note that numerical codes are not necessarily portable across platforms. --]] function string.char( ... ) return __std.String end --[[ Returns a string containing a binary representation of the given function, so that a later loadstring on this string returns a copy of the function. func must be a Lua function without upvalues. --]] function string.dump( func ) return __std.String end --[[ Looks for the first match of pattern in the string s. If it finds a match, then find returns the indices of s where this occurrence starts and ends; otherwise, it returns nil. A third, optional numerical argument init specifies where to start the search; its default value is 1 and can be negative. A value of true as a fourth, optional argument plain turns off the pattern matching facilities, so the function does a plain "find substring" operation, with no characters in pattern being considered "magic". Note that if plain is given, then init must be given as well. If the pattern has captures, then in a successful match the captured values are also returned, after the two indices. --]] function string.find( s, pattern, init, plain ) return __std.Number end --[[ Returns a formatted version of its variable number of arguments following the description given in its first argument (which must be a string). The format string follows the same rules as the printf family of standard C functions. The only differences are that the options/modifiers *, l, L, n, p, and h are not supported and that there is an extra option, q. The q option formats a string in a form suitable to be safely read back by the Lua interpreter: the string is written between double quotes, and all double quotes, newlines, embedded zeros, and backslashes in the string are correctly escaped when written. For instance, the call string.format('%q', 'a string with "quotes" and \n new line') will produce the string: "a string with \"quotes\" and \ new line" The options c, d, E, e, f, g, G, i, o, u, X, and x all expect a number as argument, whereas q and s expect a string. This function does not accept string values containing embedded zeros, except as arguments to the q option. --]] function string.format( formatstring, ... ) return __std.String end --[[ Returns an iterator function that, each time it is called, returns the next captures from pattern over string s. If pattern specifies no captures, then the whole match is produced in each call. As an example, the following loop s = "hello world from Lua" for w in string.gmatch(s, "%a+") do print(w) end will iterate over all the words from string s, printing one per line. The next example collects all pairs key=value from the given string into a table: t = {} s = "from=world, to=Lua" for k, v in string.gmatch(s, "(%w+)=(%w+)") do t[k] = v end For this function, a '^' at the start of a pattern does not work as an anchor, as this would prevent the iteration. 
--]] function string.gmatch( s, pattern ) return __std.String end --[[ Returns a copy of s in which all (or the first n, if given) occurrences of the pattern have been replaced by a replacement string specified by repl, which can be a string, a table, or a function. gsub also returns, as its second value, the total number of matches that occurred. If repl is a string, then its value is used for replacement. The character % works as an escape character: any sequence in repl of the form %n, with n between 1 and 9, stands for the value of the n-th captured substring (see below). The sequence %0 stands for the whole match. The sequence %% stands for a single %. If repl is a table, then the table is queried for every match, using the first capture as the key; if the pattern specifies no captures, then the whole match is used as the key. If repl is a function, then this function is called every time a match occurs, with all captured substrings passed as arguments, in order; if the pattern specifies no captures, then the whole match is passed as a sole argument. If the value returned by the table query or by the function call is a string or a number, then it is used as the replacement string; otherwise, if it is false or nil, then there is no replacement (that is, the original match is kept in the string). Here are some examples: x = string.gsub("hello world", "(%w+)", "%1 %1") --> x="hello hello world world" x = string.gsub("hello world", "%w+", "%0 %0", 1) --> x="hello hello world" x = string.gsub("hello world from Lua", "(%w+)%s*(%w+)", "%2 %1") --> x="world hello Lua from" x = string.gsub("home = $HOME, user = $USER", "%$(%w+)", os.getenv) --> x="home = /home/roberto, user = roberto" x = string.gsub("4+5 = $return 4+5$", "%$(.-)%$", function (s) return loadstring(s)() end) --> x="4+5 = 9" local t = {name="lua", version="5.1"} x = string.gsub("$name-$version.tar.gz", "%$(%w+)", t) --> x="lua-5.1.tar.gz" --]] function string.gsub( s, pattern, repl, n ) return __std.String end --[[ Receives a string and returns its length. The empty string "" has length 0. Embedded zeros are counted, so "a\000bc\000" has length 5. --]] function string.len( s ) return __std.Number end --[[ Receives a string and returns a copy of this string with all uppercase letters changed to lowercase. All other characters are left unchanged. The definition of what an uppercase letter is depends on the current locale. --]] function string.lower( s ) return __std.String end --[[ Looks for the first match of pattern in the string s. If it finds one, then match returns the captures from the pattern; otherwise it returns nil. If pattern specifies no captures, then the whole match is returned. A third, optional numerical argument init specifies where to start the search; its default value is 1 and can be negative. --]] function string.match( s, pattern, init ) return __std.String end --[[ Returns a string that is the concatenation of n copies of the string s. --]] function string.rep( s, n ) return __std.String end --[[ Returns a string that is the string s reversed. --]] function string.reverse( s ) return __std.String end --[[ Returns the substring of s that starts at i and continues until j; i and j can be negative. If j is absent, then it is assumed to be equal to -1 (which is the same as the string length). In particular, the call string.sub(s,1,j) returns a prefix of s with length j, and string.sub(s, -i) returns a suffix of s with length i. 
--]] function string.sub( s, i, j ) return __std.String end --[[ Receives a string and returns a copy of this string with all lowercase letters changed to uppercase. All other characters are left unchanged. The definition of what a lowercase letter is depends on the current locale. --]] function string.upper (s) return __std.String end
{ "pile_set_name": "Github" }
rule o26bb_07c3d4e1c6000b12 { meta: copyright="Copyright (c) 2014-2018 Support Intelligence Inc, All Rights Reserved." engine="saphire/1.3.8 divinorum/0.9992 icewater/0.4" viz_url="http://icewater.io/en/cluster/query?h64=o26bb.07c3d4e1c6000b12" cluster="o26bb.07c3d4e1c6000b12" cluster_size="5" filetype = "" tlp = "amber" version = "icewater snowflake" author = "Rick Wesson (@wessorh) [email protected]" date = "20180911" license = "RIL-1.0 [Rick's Internet License]" family="malicious gamehack unsafe" md5_hashes="['5673d98d4f97e773cdde9125bffff039e1540359','f8fff7765b6858bba1d524805e511ffb061fa664','f588035c2a5b71b800cabc50e3f631033902ab91']" cluster_members="http://icewater.io/en/cluster/detail?h64=o26bb.07c3d4e1c6000b12" strings: $hex_string = { 45018bcf2bca415333dbd1e93bfa1bfff7d723f976118d6424008a0a8d5202880843403bdf72f3c6460b085beb3980f9100f85fc000000837c24180074058d55 } condition: filesize > 1048576 and filesize < 4194304 and $hex_string }
{ "pile_set_name": "Github" }
/****************************************************************************** * * Copyright(c) 2007 - 2011 Realtek Corporation. All rights reserved. * * This program is free software; you can redistribute it and/or modify it * under the terms of version 2 of the GNU General Public License as * published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * ******************************************************************************/ #ifndef __ODM_PRECOMP_H__ #define __ODM_PRECOMP_H__ /* 2 Config Flags and Structs - defined by each ODM Type */ #include <osdep_service.h> #include <drv_types.h> #include <hal_intf.h> /* 2 Hardware Parameter Files */ #include "Hal8723UHWImg_CE.h" /* 2 OutSrc Header Files */ #include "odm.h" #include "odm_HWConfig.h" #include "odm_debug.h" #include "odm_RegDefine11N.h" #include "HalDMOutSrc8723A.h" /* for IQK,LCK,Power-tracking */ #include "rtl8723a_hal.h" #include "odm_interface.h" #include "odm_reg.h" #include "HalHWImg8723A_MAC.h" #include "HalHWImg8723A_RF.h" #include "HalHWImg8723A_BB.h" #include "HalHWImg8723A_FW.h" #include "odm_RegConfig8723A.h" #endif /* __ODM_PRECOMP_H__ */
{ "pile_set_name": "Github" }
<!DOCTYPE html> <style> td { text-align: center; } .parent { box-sizing: border-box; width: 60px; height: 60px; border: solid; border-width: 1px 2px 3px 4px; padding: 0px 4px 8px 12px; position: relative; } .child { position: absolute; width: 10px; height: 10px; background: green; } .top-left { top: 0; left: 12px; } .top-right { top: 0; right: 4px; } .bottom-left { bottom: 8px; left: 12px; } .bottom-right { bottom: 8px; right: 4px; } </style> <table> <tr> <td></td> <td colspan=2>LTR</td> <td colspan=2>RTL</td> </tr> <tr> <td></td> <td>Y</td> <td>X</td> <td>Y</td> <td>X</td> </tr> <tr> <td>HTB</td> <td> <div class="parent" style="writing-mode: horizontal-tb; direction: ltr; overflow-y: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: horizontal-tb; direction: ltr; overflow-x: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: horizontal-tb; direction: rtl; overflow-y: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: horizontal-tb; direction: rtl; overflow-x: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> </tr> <tr> <td>VRL</td> <td> <div class="parent" style="writing-mode: vertical-rl; direction: ltr; overflow-y: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: vertical-rl; direction: ltr; overflow-x: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: vertical-rl; direction: rtl; overflow-y: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: vertical-rl; direction: rtl; overflow-x: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> </tr> <tr> <td>VLR</td> <td> <div class="parent" style="writing-mode: vertical-lr; direction: ltr; overflow-y: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: vertical-lr; direction: ltr; overflow-x: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: vertical-lr; direction: rtl; overflow-y: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> <td> <div class="parent" style="writing-mode: 
vertical-lr; direction: rtl; overflow-x: scroll;"> <div class="child top-left"></div> <div class="child top-right"></div> <div class="child bottom-left"></div> <div class="child bottom-right"></div> </div> </td> </tr> </table>
{ "pile_set_name": "Github" }
<Type Name="UnixTransport" FullName="Beagrep.UnixTransport"> <TypeSignature Language="C#" Value="public class UnixTransport : Beagrep.Transport" /> <AssemblyInfo> <AssemblyName>Beagrep</AssemblyName> <AssemblyVersion>0.0.0.0</AssemblyVersion> </AssemblyInfo> <Base> <BaseTypeName>Beagrep.Transport</BaseTypeName> </Base> <Interfaces /> <Members> <Member MemberName=".ctor"> <MemberSignature Language="C#" Value="public UnixTransport ();" /> <MemberType>Constructor</MemberType> <Parameters /> <Docs> <summary>To be added.</summary> <remarks>To be added.</remarks> </Docs> </Member> <Member MemberName=".ctor"> <MemberSignature Language="C#" Value="public UnixTransport (string client_name);" /> <MemberType>Constructor</MemberType> <Parameters> <Parameter Name="client_name" Type="System.String" /> </Parameters> <Docs> <param name="client_name">To be added.</param> <summary>To be added.</summary> <remarks>To be added.</remarks> </Docs> </Member> </Members> <Docs> <summary>To be added.</summary> <remarks>To be added.</remarks> </Docs> </Type>
{ "pile_set_name": "Github" }
fileFormatVersion: 2
guid: bddcbefb926634718829a8fc1ec5df6e
timeCreated: 1438145546
licenseType: Free
ShaderImporter:
  defaultTextures: []
  userData:
  assetBundleName:
  assetBundleVariant:
{ "pile_set_name": "Github" }
import argparse
import os
import numpy as np
import tensorflow as tf
from matplotlib import pyplot as plt
from PIL import Image

import FCRN.models as models


def predict(model_data_path, image_path):

    # Default input size
    height = 228
    width = 304
    channels = 3
    batch_size = 1

    # Read image
    img = Image.open(image_path)
    img = img.resize([width, height], Image.ANTIALIAS)
    img = np.array(img).astype('float32')
    img = np.expand_dims(np.asarray(img), axis=0)

    # Create a placeholder for the input image
    input_node = tf.placeholder(tf.float32, shape=(None, height, width, channels))

    # Construct the network
    net = models.ResNet50UpProj({'data': input_node}, batch_size, 1, False)

    with tf.Session() as sess:

        # Load the converted parameters
        print('Loading the model')

        # Use to load from ckpt file
        saver = tf.train.Saver()
        saver.restore(sess, model_data_path)

        # Use to load from npy file
        # net.load(model_data_path, sess)

        # Evaluate the network for the given image
        pred = sess.run(net.get_output(), feed_dict={input_node: img})

        # Plot result
        fig = plt.figure()
        ii = plt.imshow(pred[0, :, :, 0], interpolation='nearest')
        fig.colorbar(ii)
        plt.show()

        return pred


def main():
    # Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('model_path', help='Converted parameters for the model')
    parser.add_argument('image_paths', help='Directory of images to predict')
    args = parser.parse_args()

    # Predict the image
    pred = predict(args.model_path, args.image_paths)

    os._exit(0)


if __name__ == '__main__':
    main()
{ "pile_set_name": "Github" }
''' Reference tzinfo implementations from the Python docs. Used for testing against as they are only correct for the years 1987 to 2006. Do not use these for real code. ''' from datetime import tzinfo, timedelta, datetime from pytz import utc, UTC, HOUR, ZERO # A class building tzinfo objects for fixed-offset time zones. # Note that FixedOffset(0, "UTC") is a different way to build a # UTC tzinfo object. class FixedOffset(tzinfo): """Fixed offset in minutes east from UTC.""" def __init__(self, offset, name): self.__offset = timedelta(minutes = offset) self.__name = name def utcoffset(self, dt): return self.__offset def tzname(self, dt): return self.__name def dst(self, dt): return ZERO # A class capturing the platform's idea of local time. import time as _time STDOFFSET = timedelta(seconds = -_time.timezone) if _time.daylight: DSTOFFSET = timedelta(seconds = -_time.altzone) else: DSTOFFSET = STDOFFSET DSTDIFF = DSTOFFSET - STDOFFSET class LocalTimezone(tzinfo): def utcoffset(self, dt): if self._isdst(dt): return DSTOFFSET else: return STDOFFSET def dst(self, dt): if self._isdst(dt): return DSTDIFF else: return ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self, dt): tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1) stamp = _time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst > 0 Local = LocalTimezone() # A complete implementation of current DST rules for major US time zones. def first_sunday_on_or_after(dt): days_to_go = 6 - dt.weekday() if days_to_go: dt += timedelta(days_to_go) return dt # In the US, DST starts at 2am (standard time) on the first Sunday in April. DSTSTART = datetime(1, 4, 1, 2) # and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct. # which is the first Sunday on or after Oct 25. DSTEND = datetime(1, 10, 25, 1) class USTimeZone(tzinfo): def __init__(self, hours, reprname, stdname, dstname): self.stdoffset = timedelta(hours=hours) self.reprname = reprname self.stdname = stdname self.dstname = dstname def __repr__(self): return self.reprname def tzname(self, dt): if self.dst(dt): return self.dstname else: return self.stdname def utcoffset(self, dt): return self.stdoffset + self.dst(dt) def dst(self, dt): if dt is None or dt.tzinfo is None: # An exception may be sensible here, in one or both cases. # It depends on how you want to treat them. The default # fromutc() implementation (called by the default astimezone() # implementation) passes a datetime with dt.tzinfo is self. return ZERO assert dt.tzinfo is self # Find first Sunday in April & the last in October. start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year)) end = first_sunday_on_or_after(DSTEND.replace(year=dt.year)) # Can't compare naive to aware objects, so strip the timezone from # dt first. if start <= dt.replace(tzinfo=None) < end: return HOUR else: return ZERO Eastern = USTimeZone(-5, "Eastern", "EST", "EDT") Central = USTimeZone(-6, "Central", "CST", "CDT") Mountain = USTimeZone(-7, "Mountain", "MST", "MDT") Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
{ "pile_set_name": "Github" }
name: cardano-sl-utxo version: 3.2.0 synopsis: Abstract definitions of UTxO based accounting -- description: homepage: https://github.com/input-output-hk/cardano-sl/#readme license: Apache-2.0 license-files: LICENSE NOTICE author: IOHK Engineering Team maintainer: [email protected] copyright: 2017-2018 IOHK category: Testing build-type: Simple extra-source-files: ChangeLog.md cabal-version: >=1.10 library exposed-modules: UTxO.DSL UTxO.Bootstrap UTxO.Context UTxO.Crypto UTxO.Generator UTxO.IntTrans UTxO.Translate UTxO.Util UTxO.Verify Data.Validated -- other-modules: build-depends: base >=4.10 && <4.12 , QuickCheck , cardano-sl , cardano-sl-binary , cardano-sl-chain , cardano-sl-chain-test , cardano-sl-client , cardano-sl-core , cardano-sl-core-test , cardano-sl-crypto , cardano-sl-db , cardano-sl-util , constraints , containers , cryptonite , data-default >= 0.7 , formatting , lens , mtl , reflection >= 2.1 , safecopy , serokell-util , universum , unordered-containers , vector hs-source-dirs: src default-language: Haskell2010 default-extensions: BangPatterns ConstraintKinds DeriveGeneric FlexibleContexts FlexibleInstances GADTs InstanceSigs LambdaCase MultiParamTypeClasses MultiWayIf NoImplicitPrelude OverloadedStrings RankNTypes RecordWildCards ScopedTypeVariables StandaloneDeriving TypeFamilies
{ "pile_set_name": "Github" }
// !$*UTF8*$! { archiveVersion = 1; classes = { }; objectVersion = 46; objects = { /* Begin PBXBuildFile section */ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; /* End PBXBuildFile section */ /* Begin PBXCopyFilesBuildPhase section */ 9705A1C41CF9048500538489 /* Embed Frameworks */ = { isa = PBXCopyFilesBuildPhase; buildActionMask = 2147483647; dstPath = ""; dstSubfolderSpec = 10; files = ( ); name = "Embed Frameworks"; runOnlyForDeploymentPostprocessing = 0; }; /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; }; 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; }; 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; }; 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; }; 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; }; 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; 97C146F21CF9000F007C117D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; }; 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = 
file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; }; 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; }; 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; }; 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ 97C146EB1CF9000F007C117D /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ 9740EEB11CF90186004384FC /* Flutter */ = { isa = PBXGroup; children = ( 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */, 9740EEB21CF90195004384FC /* Debug.xcconfig */, 7AFA3C8E1D35360C0083082E /* Release.xcconfig */, 9740EEB31CF90195004384FC /* Generated.xcconfig */, ); name = Flutter; sourceTree = "<group>"; }; 97C146E51CF9000F007C117D = { isa = PBXGroup; children = ( 9740EEB11CF90186004384FC /* Flutter */, 97C146F01CF9000F007C117D /* Runner */, 97C146EF1CF9000F007C117D /* Products */, CF3B75C9A7D2FA2A4C99F110 /* Frameworks */, ); sourceTree = "<group>"; }; 97C146EF1CF9000F007C117D /* Products */ = { isa = PBXGroup; children = ( 97C146EE1CF9000F007C117D /* Runner.app */, ); name = Products; sourceTree = "<group>"; }; 97C146F01CF9000F007C117D /* Runner */ = { isa = PBXGroup; children = ( 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */, 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */, 97C146FA1CF9000F007C117D /* Main.storyboard */, 97C146FD1CF9000F007C117D /* Assets.xcassets */, 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */, 97C147021CF9000F007C117D /* Info.plist */, 97C146F11CF9000F007C117D /* Supporting Files */, 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */, 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */, ); path = Runner; sourceTree = "<group>"; }; 97C146F11CF9000F007C117D /* Supporting Files */ = { isa = PBXGroup; children = ( 97C146F21CF9000F007C117D /* main.m */, ); name = "Supporting Files"; sourceTree = "<group>"; }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ 97C146ED1CF9000F007C117D /* Runner */ = { isa = PBXNativeTarget; buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( 9740EEB61CF901F6004384FC /* Run Script */, 97C146EA1CF9000F007C117D /* Sources */, 97C146EB1CF9000F007C117D /* Frameworks */, 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, ); buildRules = ( ); dependencies = ( ); name = Runner; productName = Runner; productReference = 97C146EE1CF9000F007C117D /* Runner.app */; productType = "com.apple.product-type.application"; }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ 97C146E61CF9000F007C117D /* Project object */ = { isa = PBXProject; attributes = { LastUpgradeCheck = 1020; ORGANIZATIONNAME = ""; TargetAttributes = { 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; }; }; }; buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */; compatibilityVersion = "Xcode 9.3"; developmentRegion = en; 
hasScannedForEncodings = 0; knownRegions = ( en, Base, ); mainGroup = 97C146E51CF9000F007C117D; productRefGroup = 97C146EF1CF9000F007C117D /* Products */; projectDirPath = ""; projectRoot = ""; targets = ( 97C146ED1CF9000F007C117D /* Runner */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ 97C146EC1CF9000F007C117D /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */, 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */, 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */, 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputPaths = ( ); name = "Thin Binary"; outputPaths = ( ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputPaths = ( ); name = "Run Script"; outputPaths = ( ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ 97C146EA1CF9000F007C117D /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */, 97C146F31CF9000F007C117D /* main.m in Sources */, 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ /* Begin PBXVariantGroup section */ 97C146FA1CF9000F007C117D /* Main.storyboard */ = { isa = PBXVariantGroup; children = ( 97C146FB1CF9000F007C117D /* Base */, ); name = Main.storyboard; sourceTree = "<group>"; }; 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = { isa = PBXVariantGroup; children = ( 97C147001CF9000F007C117D /* Base */, ); name = LaunchScreen.storyboard; sourceTree = "<group>"; }; /* End PBXVariantGroup section */ /* Begin XCBuildConfiguration section */ 249021D3217E4FDB00AE95B9 /* Profile */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; 
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 9.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Profile; }; 249021D4217E4FDB00AE95B9 /* Profile */ = { isa = XCBuildConfiguration; baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", ); INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", ); PRODUCT_BUNDLE_IDENTIFIER = {{iosIdentifier}}; PRODUCT_NAME = "$(TARGET_NAME)"; VERSIONING_SYSTEM = "apple-generic"; }; name = Profile; }; 97C147031CF9000F007C117D /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = dwarf; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_DYNAMIC_NO_PIC = NO; GCC_NO_COMMON_BLOCKS = YES; GCC_OPTIMIZATION_LEVEL = 0; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", "$(inherited)", ); GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 9.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; }; 97C147041CF9000F007C117D /* Release */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; 
CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 9.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Release; }; 97C147061CF9000F007C117D /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", ); INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", ); PRODUCT_BUNDLE_IDENTIFIER = {{iosIdentifier}}; PRODUCT_NAME = "$(TARGET_NAME)"; VERSIONING_SYSTEM = "apple-generic"; }; name = Debug; }; 97C147071CF9000F007C117D /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", ); INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", ); PRODUCT_BUNDLE_IDENTIFIER = {{iosIdentifier}}; PRODUCT_NAME = "$(TARGET_NAME)"; VERSIONING_SYSTEM = "apple-generic"; }; name = Release; }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = { isa = XCConfigurationList; buildConfigurations = ( 97C147031CF9000F007C117D /* Debug */, 97C147041CF9000F007C117D /* Release */, 249021D3217E4FDB00AE95B9 /* Profile */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = { isa = XCConfigurationList; buildConfigurations = ( 97C147061CF9000F007C117D /* Debug */, 97C147071CF9000F007C117D /* Release */, 249021D4217E4FDB00AE95B9 /* Profile */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; /* End XCConfigurationList section */ }; rootObject = 97C146E61CF9000F007C117D /* Project object */; }
{ "pile_set_name": "Github" }
//// compiled primitives for tables COMPILE_FN(table, compiledfn_table, "()", return mkref(new_table()); ) COMPILE_FN(table_set, compiledfn_table_set, "($table $key $val)", cell* table = lookup("$table"); cell* key = lookup("$key"); cell* val = lookup("$val"); if (is_table(table)) put(table, key, val); else RAISE << "can't set in a non-table: " << table << '\n'; return mkref(val); ) COMPILE_FN(table_get, compiledfn_table_get, "($table $key)", cell* table = lookup("$table"); cell* key = lookup("$key"); return mkref(get(table, key)); ) COMPILE_FN(table_to_list, compiledfn_table_to_list, "($table)", cell_map table = to_table(lookup("$table"))->value; cell* result = new_cell(); cell* curr = result; for (cell_map::iterator p = table.begin(); p != table.end(); ++p) { if (!p->second) continue; add_cons(curr, new_cons(p->first, new_cons(p->second))); curr=cdr(curr); } return drop_ptr(result); ) COMPILE_FN(table_length, compiledfn_table_length, "($table)", cell_map table = to_table(lookup("$table"))->value; return mkref(new_num((long)table.size())); )
{ "pile_set_name": "Github" }
# The Lambda Trilogy

> Note, deployment instructions are at the bottom of this readme.

The three states of AWS Lambda have been discussed by many serverless heroes since their invention. This is probably the most controversial subject in all of serverless, so I am not going to tell you which of the three is best: like everything, you need to adapt the right implementation to fit your context!

Some examples from [Paul Swail](https://twitter.com/paulswail), [Yan Cui](https://twitter.com/theburningmonk), [Jeremy Daly](https://twitter.com/jeremy_daly) and others:

- [Jeremy Daly & Paul Swail Serverless Chats #41](https://www.serverlesschats.com/41)
- [Jeremy Daly Off By None](https://www.jeremydaly.com/newsletter-issue-63/)
- [Yan Cui Presentation (slide 41 on)](https://www.slideshare.net/theburningmonk/beware-the-potholes-on-the-road-to-serverless-224107000)
- [Yan Cui re:Invent 2019](https://d1.awsstatic.com/events/reinvent/2019/REPEAT_1_How_to_refactor_a_monolith_to_serverless_in_8_steps_API310-R1.pdf)
- [Yan Cui Hackernoon - monolithic or single purpose functions?](https://hackernoon.com/aws-lambda-should-you-have-few-monolithic-functions-or-many-single-purposed-functions-8c3872d4338f)
- [Yan Cui monoliths vs cold starts](https://theburningmonk.com/2018/02/aws-lambda-monolithic-functions-wont-help-you-with-cold-starts/)
- [J D Hollis medium.com](https://medium.com/statics-and-dynamics/should-i-use-a-single-monolithic-lambda-function-or-multiple-lambda-functions-with-api-gateway-d99b0230f1e7)
- [Ryanne Dolan medium.com](https://medium.com/@ryannedolan/aws-lambda-and-the-monolith-a0eb2d1516ef)

The three states are:

![arch](img/the-single-purpose-function.png)

### Description

This is the purest of all the serverless patterns. Each lambda does one unique job and the code is in its own file.

### Pros

- Maximum code reusability
- Forces you to write more testable code
- Introduces the lowest cognitive burden for developers making changes to that individual function
- Easier to optimize your lambda execution times and, by extension, costs

### Cons

- Only works for fully event-driven architectures
- Seeing the bigger picture gets harder; cognitive burden increases when system-wide changes are discussed
- Maintenance as it grows (how do you make sure 7000 lambdas have no code vulnerabilities?)

![arch](img/the-fat-lambda.png)

### Description

This is a compromise option where we still have individual lambdas but we group the actual code together in one (or more) files. You would decide what goes into a file based on low-coupling, high-cohesion arguments, as in traditional development.

### Pros

- Related logic is grouped together, making it easier to see the bigger picture in your code
- Code can easily be shared between lambda functions without needing things like layers
- Security footprint is reduced, as updating one file can update many lambda functions

### Cons

- How big is too big? Every extra byte of code added slows your lambda cold start times.
- Increased blast radius of changes. Now one changed line of code could bring down a section of your infrastructure instead of one lambda.

![arch](img/the-lambda-lith.png)

### Description

This is using the lambda runtime container like a docker container. You use a web framework like Flask or Express, put it inside the lambda, then have your API Gateway pass all requests through to the lambda and let that framework process the request.
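To make that wiring concrete, here is a minimal sketch of what a lambda-lith entry point can look like (illustrative only, assuming Express plus the aws-serverless-express adapter; the route, the response shape and the AWS_LAMBDA_FUNCTION_NAME check for local runs are assumptions for illustration, not this project's actual lambdalith file):

```typescript
// lambdalith-sketch.ts (illustrative only)
import express from 'express';
import { createServer, proxy } from 'aws-serverless-express';

const app = express();

// Routing happens inside the framework instead of in API Gateway.
app.get('/add', (req, res) => {
  const firstNum = Number(req.query.firstNum) || 0;   // fall back to 0 on missing/invalid input
  const secondNum = Number(req.query.secondNum) || 0;
  res.json({ result: firstNum + secondNum });
});

// When running locally there is no Lambda runtime, so start a normal HTTP server.
if (!process.env.AWS_LAMBDA_FUNCTION_NAME) {
  app.listen(3000, () => console.log('Listening on http://localhost:3000'));
}

// In Lambda, the adapter translates API Gateway proxy events into HTTP requests for Express.
const server = createServer(app);
export const handler = (event: any, context: any) => proxy(server, event, context);
```

Locally this runs like any Express app; deployed, API Gateway proxies every path to the single handler and the adapter does the translation.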
### Pros

- You can have a local development experience identical to the deployed one, since you are using no AWS-specific features
- The code could be moved to Fargate (or another cloud) later with minimal changes if it got too big for Lambda
- Developers already know these frameworks

### Cons

- Is this really what Lambda excels at? The larger project sizes will increase cold start times and there will be restrictions on incoming/outgoing payload sizes
- Higher exposure to cold starts, as the lambda will spend longer processing events
- Lower levels of code reuse, as you are probably still building the traditional ball of mud
- Adapters are required to make existing frameworks work with lambda. These are in various states of maturity and are another potential vulnerability in your app.

## Deconstructing The Lambda Trilogy

If you want a walkthrough of the theory, the code and finally a demo of the deployed implementation, check out:

[![Alt text](https://img.youtube.com/vi/tHD3i06Z6gU/0.jpg)](https://www.youtube.com/watch?v=tHD3i06Z6gU)

## What's In This CDK Pattern?

I have bundled fully TypeScript and fully Python versions (including the lambdas) for all 3 lambda states inside this pattern, because most of the logic takes place outside the AWS CDK infrastructure code.

The logic that I have used to demonstrate these patterns is a partially functional calculator. This calculator can only perform three functions (it was on sale):

- Add
- Subtract
- Multiply

When you deploy this project you should see 3 API Gateways in your deployment logs, one for each of the states. You can hit the same URLs on all 3 to see the same responses. You pass in two query params for the numbers you want to use in the operation (firstNum and secondNum). If you don't provide a valid number, a default of 0 is used.

```
Addition - https://{api gateway url}/add?firstNum=3&secondNum=4
Subtraction - https://{api gateway url}/subtract?firstNum=4&secondNum=3
Multiply - https://{api gateway url}/multiply?firstNum=3&secondNum=4
```

## There's A Lot Of Code Here, What Should I Actually Look At?

There are 3 distinct CDK stacks in this project, all instantiated in the [bin file](bin/the-lambda-trilogy.ts). When CDK deploys this application you should see 3 different CloudFormation stacks in the AWS Console, and if you update the code in one but not the other 2 you should see CDK deploy only the one you changed. This is a pretty cool, advanced feature of AWS CDK.

![bin file](img/bin.png)

### TheSinglePurposeFunctionStack

You can see inside our [stack definition](lib/the-single-purpose-function-stack.ts) that this project has 3 endpoints defined on the API Gateway and 3 [lambdas](lambdas/single-purpose-function) defined.

![api gateway](img/spf_apigw.png)

If you look carefully inside each lambda you will notice that they only perform a single operation (add, subtract or multiply), but you will also see a [duplicated function](lambdas/single-purpose-function/add.ts#L16) sendRes that formats the response from the Lambda for API Gateway.

![lambda](img/spf_add_lambda.png)

You could use layers or create a package that you install via npm for these kinds of things, but in the purest representation of this pattern, for the sake of autonomy, you accept small amounts of code duplication. This is a positive when you want to move in a different direction with one function and a negative if you need to update them all.
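For contrast, a single-purpose function in this style boils down to roughly the following sketch (illustrative only; the event parsing and the exact shape returned by sendRes are assumptions, not a copy of the repository's add.ts):

```typescript
// add-sketch.ts (illustrative only)
export const handler = async (event: any) => {
  const firstNum = Number(event.queryStringParameters?.firstNum) || 0;
  const secondNum = Number(event.queryStringParameters?.secondNum) || 0;
  return sendRes(200, `${firstNum} + ${secondNum} = ${firstNum + secondNum}`);
};

// Every single-purpose lambda ships its own copy of this response helper,
// which is exactly the duplication discussed above.
const sendRes = (status: number, body: string) => ({
  statusCode: status,
  headers: { 'Content-Type': 'text/html' },
  body,
});
```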
### TheFatLambdaStack

The big difference between this implementation and the one above is that all 3 functions (add, subtract and multiply) are inside the same [TS file](lambdas/fat-lambda/fat-lambda.ts). This means that we can still define 3 lambdas inside our CDK logic, but we point to a different method in the same file all 3 times:

![lambda definition](img/fl_cdk.png)

You should also notice that the [sendRes method](lambdas/fat-lambda/fat-lambda.ts#L49) is no longer duplicated with this pattern, as all 3 lambdas can just call the same one. A minimal illustrative sketch of this one-file layout is included at the end of this readme.

### TheLambdalithStack

OK, this state is very different from the other two. The CDK for this is bare bones: just one lambda function and a proxy API Gateway:

![lambdalith cdk](img/lambdalith_cdk2.png)

All of the action takes place inside [the lambda-lith](lambdas/the-lambda-lith) itself.

#### [package.json](lambdas/the-lambda-lith/package.json)

Now that we are building an application using Express.js inside our lambda, we need to pull it in as a dependency. We also need to pull in aws-serverless-express to make Express compatible with lambda and API Gateway. You will be responsible for keeping these versions up to date, unlike when API Gateway handles the routing.

I did include a start command for starting up the Express server locally to show the advantage of this pattern.

![package json](img/lambdalith_package.png)

#### Routing

Inside the [lambdalith lambda](lambdas/the-lambda-lith/lambdalith.ts) we use Express for routing to logic in this file, rather than using API Gateway as in the previous two states:

![express routing](img/lambdalith_routes.png)

#### Local vs Deployed

We also need to define logic for how to start up the server locally vs in a lambda container, since locally we don't need the aws-serverless-express adapter.

![lambdalith startup](img/lambdalith_startup.png)

## Useful commands

* `npm run build`   compile typescript to js
* `npm run watch`   watch for changes and compile
* `npm run test`    perform the jest unit tests
* `npm run deploy`  deploy this stack to your default AWS account/region
* `cdk diff`        compare deployed stack with current state
* `cdk synth`       emits the synthesized CloudFormation template
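As referenced in the TheFatLambdaStack section above, here is a minimal sketch of that one-file layout (illustrative only; the shared calculate helper and the response shape are assumptions, not the repository's fat-lambda.ts). In the CDK stack, each of the 3 function definitions would point at a different exported handler of this one file (for example `fat-lambda.add`):

```typescript
// fat-lambda-sketch.ts (illustrative only)
export const add = async (event: any) => calculate(event, (a, b) => a + b);
export const subtract = async (event: any) => calculate(event, (a, b) => a - b);
export const multiply = async (event: any) => calculate(event, (a, b) => a * b);

// Shared logic lives once in this file; all three handlers above reuse it.
const calculate = (event: any, op: (a: number, b: number) => number) => {
  const firstNum = Number(event.queryStringParameters?.firstNum) || 0;
  const secondNum = Number(event.queryStringParameters?.secondNum) || 0;
  return sendRes(200, `${op(firstNum, secondNum)}`);
};

// The response helper is now shared instead of duplicated per lambda.
const sendRes = (status: number, body: string) => ({
  statusCode: status,
  headers: { 'Content-Type': 'text/html' },
  body,
});
```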
{ "pile_set_name": "Github" }
# ----------------------------------------------------------------------------- # Copyright (c) 2015, Nicolas P. Rougier. All Rights Reserved. # Distributed under the (new) BSD License. See LICENSE.txt for more info. # ----------------------------------------------------------------------------- import numpy as np import matplotlib.pyplot as plt eqs = [] eqs.append((r"$W^{3\beta}_{\delta_1 \rho_1 \sigma_2} = U^{3\beta}_{\delta_1 \rho_1} + \frac{1}{8 \pi 2} \int^{\alpha_2}_{\alpha_2} d \alpha^\prime_2 \left[\frac{ U^{2\beta}_{\delta_1 \rho_1} - \alpha^\prime_2U^{1\beta}_{\rho_1 \sigma_2} }{U^{0\beta}_{\rho_1 \sigma_2}}\right]$")) eqs.append((r"$\frac{d\rho}{d t} + \rho \vec{v}\cdot\nabla\vec{v} = -\nabla p + \mu\nabla^2 \vec{v} + \rho \vec{g}$")) eqs.append((r"$\int_{-\infty}^\infty e^{-x^2}dx=\sqrt{\pi}$")) eqs.append((r"$E = mc^2 = \sqrt{{m_0}^2c^4 + p^2c^2}$")) eqs.append((r"$F_G = G\frac{m_1m_2}{r^2}$")) plt.axes([0.025,0.025,0.95,0.95]) for i in range(24): index = np.random.randint(0,len(eqs)) eq = eqs[index] size = np.random.uniform(12,32) x,y = np.random.uniform(0,1,2) alpha = np.random.uniform(0.25,.75) plt.text(x, y, eq, ha='center', va='center', color="#11557c", alpha=alpha, transform=plt.gca().transAxes, fontsize=size, clip_on=True) plt.xticks([]), plt.yticks([]) # savefig('../figures/text_ex.png',dpi=48) plt.show()
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <dynaForm name="percentage" type="xmlform" width="100%" enableTemplate="1"> <PME_A type="hidden"/> <PME_TYPE type="hidden"/> <PME_PRO_UID type="hidden"/> <PME_DYN_TYPE type="phpvariable"/> <PME_TITLE type="title" enableHTML="1"> <en><![CDATA[Properties]]></en> </PME_TITLE> <PME_XMLNODE_NAME type="text" size="32" maxlength="64" validate="NodeName" dependentFields="PME_VALIDATE_NAME" required="1"> <en><![CDATA[Field Name]]></en> </PME_XMLNODE_NAME> <PME_XMLNODE_NAME_OLD type="hidden"/> <PME_VALIDATE_NAME type="hidden" sqlConnection="XMLDB"> SELECT XMLNODE_NAME, TYPE FROM dynaForm WHERE XMLNODE_NAME = @@PME_XMLNODE_NAME </PME_VALIDATE_NAME> <PME_LABEL type="text" maxlength="255" size="50"> <en><![CDATA[Label]]></en> </PME_LABEL> <PME_SUBTITLE3 type="title" enableHTML="1"> <en><![CDATA[Behaviour]]></en> </PME_SUBTITLE3> <!-- <PME_MAXLENGTH type="text" maxlength="10" size="10" validate="Int" defaultvalue="15"> <en>Max. Length</en> </PME_MAXLENGTH> --> <PME_VALIDATE type="dropdown" defaultvalue="Real"> <en><![CDATA[Validate]]><option name="Int"><![CDATA[Integer]]></option><option name="Real"><![CDATA[Real Number]]></option></en> </PME_VALIDATE> <PME_COMMA_SEPARATOR type="dropdown" defaultvalue="."> <en><![CDATA[Decimal Separator]]><option name="."><![CDATA[Period [.]]]></option><option name=","><![CDATA[Comma [,]]]></option></en> </PME_COMMA_SEPARATOR> <PME_MASK type="text" maxlength="50" size="30" defaultvalue="###.## %"> <en><![CDATA[Mask]]></en> </PME_MASK> <PME_REQUIRED type="checkbox" falseValue="0" value="1" defaultvalue="0" labelOnRight="0"> <en><![CDATA[Required]]></en> </PME_REQUIRED> <PME_READONLY type="checkbox" falseValue="0" value="1" defaultvalue="0" labelOnRight="0"> <en><![CDATA[Read Only]]></en> </PME_READONLY> <PME_DEFAULTVALUE type="text" maxlength="50" size="30" defaultvalue=""> <en><![CDATA[Default Value]]></en> </PME_DEFAULTVALUE> <PME_HINT type="textarea" cols="47" rows="3"> <en><![CDATA[Hint]]></en> </PME_HINT> <PME_SUBTITLE type="title" enableHTML="1"> <en><![CDATA[Appearance]]></en> </PME_SUBTITLE> <PME_SIZE type="text" maxlength="10" size="10" validate="Int" defaultvalue="15"> <en><![CDATA[Size]]></en> </PME_SIZE> <PME_MODE type="dropdown" defaultvalue="edit"> <en><![CDATA[Mode]]><option name="edit"><![CDATA[Edit]]></option><option name="view"><![CDATA[View]]></option></en> </PME_MODE> <PME_SUBTITLE_OP type="title" enableHTML="1"> <en><![CDATA[Operations]]></en> </PME_SUBTITLE_OP> <PME_FORMULA type="text" maxlength="500" size="30" defaultvalue=""> <en><![CDATA[Formula]]></en> </PME_FORMULA> <PME_FUNCTION type="dropdown"> <en><![CDATA[Function]]><option name=""><![CDATA[None]]></option><option name="sum"><![CDATA[SUM]]></option><option name="avg"><![CDATA[AVG]]></option></en> </PME_FUNCTION> <PME_SUBTITLE2 type="title" enableHTML="1"> <en><![CDATA[Data]]></en> </PME_SUBTITLE2> <PME_SQLCONNECTION type="dropdown" sqlconnection="dbarray"><![CDATA[ SELECT * FROM DB_CONNECTIONS ]]><en><![CDATA[Sql Connection]]><option name=""><![CDATA[(none)]]></option></en></PME_SQLCONNECTION> <PME_XMLNODE_VALUE type="textarea" cols="47" rows="3"> <en><![CDATA[Sql]]></en> </PME_XMLNODE_VALUE> <BTN_CANCEL type="button" onclick="cancel();"> <en><![CDATA[Cancel]]></en> </BTN_CANCEL> <PME_ACCEPT type="button" onclick="fieldsSave( this.form );"> <en><![CDATA[Save]]></en> </PME_ACCEPT> <PME_JS type="javascript"><![CDATA[ var fieldForm="percentage"; var fieldName=getField("PME_XMLNODE_NAME",fieldForm); var fieldVal=getField("PME_VALIDATE",fieldForm); var 
fieldMask=getField("PME_MASK",fieldForm); var fieldSeparator=getField("PME_COMMA_SEPARATOR",fieldForm); var savedFieldName=fieldName.value; var pme_validating; fieldName.focus(); fieldName.select(); leimnud.event.add(fieldName, 'change', {method:dynaformVerifyFieldName, instance:fieldName, event:true}); leimnud.event.add(fieldVal, 'change', {method:checkVal, instance:fieldVal, event:true}); leimnud.event.add(fieldMask, 'change', {method:checkVal, instance:fieldMask, event:true}); leimnud.event.add(fieldSeparator, 'change', {method:checkSeparator, instance:fieldSeparator, event:true}); var dyntype="@#PME_DYN_TYPE"; if(dyntype != 'grid'){ //hideRowById('PME_SUBTITLE_OP'); //hideRowById('PME_FORMULA'); hideRowById('PME_FUNCTION'); } function cancel(){ currentPopupWindow.remove(); } function checkSeparator() { aux = fieldMask.value; var separatorOld; var separatorNew; switch (fieldSeparator.value) { case '.': separatorOld = ','; separatorNew = '.'; break; case ',': separatorOld = '.'; separatorNew = ','; break; default: separatorOld = ','; separatorNew = '.'; break; } aux = aux.replace(separatorOld, separatorNew); fieldMask.value = aux; } function checkVal(){ aux = fieldMask.value; aux = aux.replace('.000',''); var amount = fieldMask.value.length; var sw = 0; var i = 0; decimal = ' %'; while (sw == 0 && i < amount) { if(aux.charAt(amount - i - 1) == ',' || aux.charAt(amount - i - 1) == '.') { sw = 1; } else { i++; } } if (sw == 1) { decimal = aux.substring(amount - i, amount); aux = aux.substring(0, amount - i - 1); aux = aux.concat(' %'); } if (fieldVal.value === 'Real'){ var index = aux.indexOf(' '); aux = aux.substring(0, index); if (decimal != ' %') { aux = aux.concat(fieldSeparator.value, decimal); } else { aux = aux.concat(fieldSeparator.value,'##',' %'); } } fieldMask.value = aux; } ]]></PME_JS> </dynaForm>
{ "pile_set_name": "Github" }
jQuery( document ).ready(function( $ ) { $( '#link_rel' ).prop( 'readonly', true ); $( '#linkxfndiv input' ).bind( 'click keyup', function() { var isMe = $( '#me' ).is( ':checked' ), inputs = ''; $( 'input.valinp' ).each( function() { if ( isMe ) { $( this ).prop( 'disabled', true ).parent().addClass( 'disabled' ); } else { $( this ).removeAttr( 'disabled' ).parent().removeClass( 'disabled' ); if ( $( this ).is( ':checked' ) && $( this ).val() !== '') { inputs += $( this ).val() + ' '; } } }); $( '#link_rel' ).val( ( isMe ) ? 'me' : inputs.substr( 0,inputs.length - 1 ) ); }); });
{ "pile_set_name": "Github" }
/* * SonarQube * Copyright (C) 2009-2020 SonarSource SA * mailto:info AT sonarsource DOT com * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package org.sonar.application.config; import java.util.Optional; import org.sonar.process.Props; public interface AppSettings { Props getProps(); Optional<String> getValue(String key); void reload(Props copy); }
{ "pile_set_name": "Github" }
import React from 'react'; import { withStyles, Card, CardContent, CardHeader, CardActions, Typography, } from 'material-ui'; import PropTypes from 'prop-types'; import chartCardStyle from 'variables/styles/chartCardStyle'; /* eslint-disable no-nested-ternary */ function ChartCard({ ...props }) { const { classes, chartColor, statIconColor, chart, title, text, statLink, statText, } = props; return ( <Card className={classes.card}> <CardHeader className={`${classes.cardHeader} ${ classes[`${chartColor}CardHeader`] }`} subheader={chart} /> <CardContent className={classes.cardContent}> <Typography variant="title" component="h4" className={classes.cardTitle} > {title} </Typography> <Typography component="p" className={classes.cardCategory}> {text} </Typography> </CardContent> <CardActions className={classes.cardActions}> <div className={classes.cardStats}> <props.statIcon className={`${classes.cardStatsIcon} ${ classes[`${statIconColor}CardStatsIcon`] }`} />{' '} {statLink !== undefined ? ( <a href={statLink.href} className={classes.cardStatsLink}> {statLink.text} </a> ) : statText !== undefined ? ( statText ) : null} </div> </CardActions> </Card> ); } ChartCard.defaultProps = { statIconColor: 'gray', chartColor: 'purple', }; ChartCard.propTypes = { classes: PropTypes.object.isRequired, chart: PropTypes.object.isRequired, title: PropTypes.node, text: PropTypes.node, statIcon: PropTypes.func.isRequired, statIconColor: PropTypes.oneOf([ 'warning', 'primary', 'danger', 'success', 'info', 'rose', 'gray', ]), chartColor: PropTypes.oneOf(['orange', 'green', 'red', 'blue', 'purple']), statLink: PropTypes.object, statText: PropTypes.node, }; export default withStyles(chartCardStyle)(ChartCard);
{ "pile_set_name": "Github" }
THE FOLLOWING SETS FORTH ATTRIBUTION NOTICES FOR THIRD PARTY SOFTWARE THAT MAY BE CONTAINED IN PORTIONS OF THE PARSE PRODUCT. ----- The following software may be included in this product: AFNetworking. This software contains the following license and notice below: Copyright (c) 2011 Gowalla (http://gowalla.com/) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ----- The following software may be included in this product: EGOTableViewPullRefresh. This software contains the following license and notice below: // // EGORefreshTableHeaderView.h // Demo // // Created by Devin Doty on 10/14/09October14. // Copyright 2009 enormego. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // ----- The following software may be included in this product: MBProgressHUD. This software contains the following license and notice below: Copyright (c) 2013 Matej Bukovinski Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ----- The following software may be included in this product: OAuthCore. This software contains the following license and notice below: Copyright (C) 2012 Loren Brichter Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ----- The following software may be included in this product: SBJson. This software contains the following license and notice below: Copyright (C) 2007-2011 Stig Brautaset. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{ "pile_set_name": "Github" }
<?php return [ [ 'key' => 'account', 'name' => 'shop::app.layouts.my-account', 'route' =>'customer.profile.index', 'sort' => 1, ], [ 'key' => 'account.profile', 'name' => 'shop::app.layouts.profile', 'route' =>'customer.profile.index', 'sort' => 1, ], [ 'key' => 'account.address', 'name' => 'shop::app.layouts.address', 'route' =>'customer.address.index', 'sort' => 2, ], [ 'key' => 'account.reviews', 'name' => 'shop::app.layouts.reviews', 'route' =>'customer.reviews.index', 'sort' => 3, ], [ 'key' => 'account.wishlist', 'name' => 'shop::app.layouts.wishlist', 'route' =>'customer.wishlist.index', 'sort' => 4, ], [ 'key' => 'account.compare', 'name' => 'shop::app.customer.compare.text', 'route' =>'velocity.customer.product.compare', 'sort' => 5, ], [ 'key' => 'account.orders', 'name' => 'shop::app.layouts.orders', 'route' =>'customer.orders.index', 'sort' => 6, ], [ 'key' => 'account.downloadables', 'name' => 'shop::app.layouts.downloadable-products', 'route' =>'customer.downloadable_products.index', 'sort' => 7, ] ]; ?>
{ "pile_set_name": "Github" }
package com.hubspot.singularity.data.history; import com.hubspot.singularity.ExtendedTaskState; import com.hubspot.singularity.SingularityDeployHistory; import com.hubspot.singularity.SingularityRequest; import com.hubspot.singularity.SingularityTaskHistory; import com.hubspot.singularity.data.history.SingularityMappers.SingularityRequestIdCount; import java.util.Date; import java.util.List; import java.util.Optional; import org.jdbi.v3.json.Json; import org.jdbi.v3.sqlobject.SingleValue; import org.jdbi.v3.sqlobject.customizer.Bind; import org.jdbi.v3.sqlobject.statement.SqlQuery; import org.jdbi.v3.sqlobject.statement.SqlUpdate; public interface PostgresHistoryJDBI extends AbstractHistoryJDBI { @SqlUpdate( "INSERT INTO requestHistory (requestId, json, createdAt, requestState, f_user, message) VALUES (:requestId, :json, :createdAt, :requestState, :user, :message)" ) void insertRequestHistory( @Bind("requestId") String requestId, @Bind("json") @Json SingularityRequest request, @Bind("createdAt") Date createdAt, @Bind("requestState") String requestState, @Bind("user") String user, @Bind("message") String message ); @SqlUpdate( "INSERT INTO deployHistory (requestId, deployId, createdAt, f_user, message, deployStateAt, deployState, json) VALUES (:requestId, :deployId, :createdAt, :user, :message, :deployStateAt, :deployState, :json)" ) void insertDeployHistory( @Bind("requestId") String requestId, @Bind("deployId") String deployId, @Bind("createdAt") Date createdAt, @Bind("user") String user, @Bind("message") String message, @Bind("deployStateAt") Date deployStateAt, @Bind("deployState") String deployState, @Bind("json") @Json SingularityDeployHistory deployHistory ); @SqlUpdate( "INSERT INTO taskHistory (requestId, taskId, json, updatedAt, lastTaskStatus, runId, deployId, host, startedAt, purged) VALUES (:requestId, :taskId, :json, :updatedAt, :lastTaskStatus, :runId, :deployId, :host, :startedAt, false)" ) void insertTaskHistory( @Bind("requestId") String requestId, @Bind("taskId") String taskId, @Bind("json") @Json SingularityTaskHistory taskHistory, @Bind("updatedAt") Date updatedAt, @Bind("lastTaskStatus") String lastTaskStatus, @Bind("runId") String runId, @Bind("deployId") String deployId, @Bind("host") String host, @Bind("startedAt") Date startedAt ); @SingleValue @SqlQuery("SELECT json FROM taskHistory WHERE taskId = :taskId") @Json SingularityTaskHistory getTaskHistoryForTask(@Bind("taskId") String taskId); @SingleValue @SqlQuery( "SELECT json FROM taskHistory WHERE requestId = :requestId AND runId = :runId" ) @Json SingularityTaskHistory getTaskHistoryForTaskByRunId( @Bind("requestId") String requestId, @Bind("runId") String runId ); @SingleValue @SqlQuery( "SELECT json FROM deployHistory WHERE requestId = :requestId AND deployId = :deployId" ) @Json SingularityDeployHistory getDeployHistoryForDeploy( @Bind("requestId") String requestId, @Bind("deployId") String deployId ); @SqlQuery( "SELECT requestId, deployId, createdAt, f_user, message, deployStateAt, deployState FROM deployHistory WHERE requestId = :requestId ORDER BY createdAt DESC OFFSET :limitStart LIMIT :limitCount" ) List<SingularityDeployHistory> getDeployHistoryForRequest( @Bind("requestId") String requestId, @Bind("limitStart") Integer limitStart, @Bind("limitCount") Integer limitCount ); @SqlQuery("SELECT COUNT(*) FROM deployHistory WHERE requestId = :requestId") int getDeployHistoryForRequestCount(@Bind("requestId") String requestId); @SqlQuery("SELECT COUNT(*) FROM requestHistory WHERE requestId = :requestId") int 
getRequestHistoryCount(@Bind("requestId") String requestId); @SqlQuery( "SELECT DISTINCT requestId as id FROM requestHistory WHERE requestId LIKE CONCAT(:requestIdLike, '%') OFFSET :limitStart LIMIT :limitCount" ) List<String> getRequestHistoryLike( @Bind("requestIdLike") String requestIdLike, @Bind("limitStart") Integer limitStart, @Bind("limitCount") Integer limitCount ); @SqlQuery( "SELECT requestId, COUNT(*) as count FROM taskHistory WHERE updatedAt \\< :updatedAt GROUP BY requestId" ) List<SingularityRequestIdCount> getRequestIdCounts(@Bind("updatedAt") Date updatedAt); @SqlQuery( "SELECT MIN(updatedAt) from (SELECT updatedAt FROM taskHistory WHERE requestId = :requestId ORDER BY updatedAt DESC LIMIT :limit) as alias" ) Date getMinUpdatedAtWithLimitForRequest( @Bind("requestId") String requestId, @Bind("limit") Integer limit ); @SqlUpdate( "UPDATE taskHistory SET json = NULL, purged = true WHERE requestId = :requestId AND purged = false AND updatedAt \\< :updatedAtBefore LIMIT :purgeLimitPerQuery" ) void updateTaskHistoryNullBytesForRequestBefore( @Bind("requestId") String requestId, @Bind("updatedAtBefore") Date updatedAtBefore, @Bind("purgeLimitPerQuery") Integer purgeLimitPerQuery ); @SqlUpdate( "DELETE FROM taskHistory WHERE requestId = :requestId AND updatedAt \\< :updatedAtBefore LIMIT :purgeLimitPerQuery" ) void deleteTaskHistoryForRequestBefore( @Bind("requestId") String requestId, @Bind("updatedAtBefore") Date updatedAtBefore, @Bind("purgeLimitPerQuery") Integer purgeLimitPerQuery ); @SqlQuery("SELECT DISTINCT requestId as id FROM taskHistory") List<String> getRequestIdsInTaskHistory(); @SqlQuery( "SELECT COUNT(*) FROM taskHistory WHERE requestId = :requestId AND purged = false AND updatedAt \\< :updatedAtBefore" ) int getUnpurgedTaskHistoryCountByRequestBefore( @Bind("requestId") String requestId, @Bind("updatedAtBefore") Date updatedAtBefore ); @SqlQuery("SELECT DISTINCT requestId AS id FROM requestHistory") List<String> getRequestIdsWithHistory(); @SqlUpdate( "DELETE FROM requestHistory WHERE requestId = :requestId AND createdAt \\< :threshold LIMIT :batchSize" ) int purgeRequestHistory( @Bind("requestId") String requestId, @Bind("threshold") Date threshold, @Bind("batchSize") int batchSize ); @SqlQuery("SELECT DISTINCT requestId AS id FROM deployHistory") List<String> getRequestIdsWithDeploys(); @SqlUpdate( "DELETE FROM deployHistory WHERE requestId = :requestId AND createdAt \\< :threshold LIMIT :batchSize" ) int purgeDeployHistory( @Bind("requestId") String requestId, @Bind("threshold") Date threshold, @Bind("batchSize") int batchSize ); // Deprecated queries for before json backfill is finished @Deprecated @SingleValue @SqlQuery("SELECT bytes FROM taskHistory WHERE taskId = :taskId") byte[] getTaskHistoryBytesForTask(@Bind("taskId") String taskId); @Deprecated @SingleValue @SqlQuery( "SELECT bytes FROM taskHistory WHERE requestId = :requestId AND runId = :runId" ) byte[] getTaskHistoryBytesForTaskByRunId( @Bind("requestId") String requestId, @Bind("runId") String runId ); @Deprecated @SingleValue @SqlQuery( "SELECT bytes FROM deployHistory WHERE requestId = :requestId AND deployId = :deployId" ) byte[] getDeployHistoryBytesForDeploy( @Bind("requestId") String requestId, @Bind("deployId") String deployId ); // Queries for history migration @SqlQuery( "SELECT bytes FROM taskHistory WHERE requestId = :requestId AND purged = false AND bytes != '' AND bytes IS NOT NULL LIMIT :limit" ) List<byte[]> getTasksWithBytes( @Bind("requestId") String requestId, @Bind("limit") int 
limit ); @SqlUpdate("UPDATE taskHistory SET json = :json, bytes = '' WHERE taskId = :taskId") void setTaskJson( @Bind("taskId") String taskId, @Bind("json") @Json SingularityTaskHistory taskHistory ); @SqlQuery( "SELECT request, createdAt FROM requestHistory WHERE request != '' AND request IS NOT NULL LIMIT :limit" ) List<SingularityRequestAndTime> getRequestsWithBytes(@Bind("limit") int limit); @SqlUpdate( "UPDATE requestHistory SET json = :json, request = '' WHERE requestId = :requestId AND createdAt = :createdAt" ) void setRequestJson( @Bind("requestId") String requestId, @Bind("createdAt") Date createdAt, @Bind("json") @Json SingularityRequest request ); @SqlQuery( "SELECT bytes FROM deployHistory WHERE requestId = :requestId AND bytes != '' AND bytes IS NOT NULL LIMIT :limit" ) List<byte[]> getDeploysWithBytes( @Bind("requestId") String requestId, @Bind("limit") int limit ); @SqlUpdate( "UPDATE deployHistory SET json = :json, bytes = '' WHERE requestId = :requestId AND deployId = :deployId" ) void setDeployJson( @Bind("requestId") String requestId, @Bind("deployId") String deployId, @Bind("json") @Json SingularityDeployHistory deployHistory ); //Postgres doesn't support index hinting @Override default boolean shouldAddForceIndexClause( Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> updatedBefore, Optional<Long> updatedAfter ) { return false; } @Override default String getRequestHistoryBaseQuery() { return "SELECT json, request, createdAt, requestState, f_user, message FROM requestHistory"; } default void close() {} }
{ "pile_set_name": "Github" }
/* * Copyright 2010-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license * that can be found in the LICENSE file. */ package codegen.boxing.boxing2 import kotlin.test.* fun printInt(x: Int) = println(x) fun printBoolean(x: Boolean) = println(x) fun foo(arg: Any) { if (arg is Int) printInt(arg) else if (arg is Boolean) printBoolean(arg) else println("other") } @Test fun runTest() { foo(1) foo(true) foo("Hello") }
{ "pile_set_name": "Github" }
Index: gcc-4.9.2/gcc/cp/Make-lang.in
===================================================================
--- gcc-4.9.2/gcc/cp/Make-lang.in	(revision 233574)
+++ gcc-4.9.2/gcc/cp/Make-lang.in	(working copy)
@@ -111,7 +111,7 @@ else
 # deleting the $(srcdir)/cp/cfns.h file.
 $(srcdir)/cp/cfns.h:
 endif
-	gperf -o -C -E -k '1-6,$$' -j1 -D -N 'libc_name_p' -L ANSI-C \
+	gperf -o -C -E -k '1-6,$$' -j1 -D -N 'libc_name_p' -L C++ \
 	  $(srcdir)/cp/cfns.gperf --output-file $(srcdir)/cp/cfns.h
 #
Index: gcc-4.9.2/gcc/cp/cfns.gperf
===================================================================
--- gcc-4.9.2/gcc/cp/cfns.gperf	(revision 233574)
+++ gcc-4.9.2/gcc/cp/cfns.gperf	(working copy)
@@ -1,3 +1,5 @@
+%language=C++
+%define class-name libc_name
 %{
 /* Copyright (C) 2000-2015 Free Software Foundation, Inc.
 
@@ -16,14 +18,6 @@ for more details.
 You should have received a copy of the GNU General Public License
 along with GCC; see the file COPYING3.  If not see
 <http://www.gnu.org/licenses/>.  */
-#ifdef __GNUC__
-__inline
-#endif
-static unsigned int hash (const char *, unsigned int);
-#ifdef __GNUC__
-__inline
-#endif
-const char * libc_name_p (const char *, unsigned int);
 %}
 %%
 # The standard C library functions, for feeding to gperf; the result is used
Index: gcc-4.9.2/gcc/cp/cfns.h
===================================================================
--- gcc-4.9.2/gcc/cp/cfns.h	(revision 233574)
+++ gcc-4.9.2/gcc/cp/cfns.h	(working copy)
@@ -1,5 +1,5 @@
-/* ANSI-C code produced by gperf version 3.0.3 */
-/* Command-line: gperf -o -C -E -k '1-6,$' -j1 -D -N libc_name_p -L ANSI-C cfns.gperf */
+/* C++ code produced by gperf version 3.0.4 */
+/* Command-line: gperf -o -C -E -k '1-6,$' -j1 -D -N libc_name_p -L C++ --output-file cfns.h cfns.gperf */
 
 #if !((' ' == 32) && ('!' == 33) && ('"' == 34) && ('#' == 35) \
       && ('%' == 37) && ('&' == 38) && ('\'' == 39) && ('(' == 40) \
@@ -28,7 +28,7 @@
 #error "gperf generated tables don't work with this execution character set. Please report a bug to <[email protected]>."
 #endif
 
-#line 1 "cfns.gperf"
+#line 3 "cfns.gperf"
 
 /* Copyright (C) 2000-2015 Free Software Foundation, Inc.
 
@@ -47,26 +47,19 @@ for more details.
 You should have received a copy of the GNU General Public License
 along with GCC; see the file COPYING3.  If not see
 <http://www.gnu.org/licenses/>.  */
-#ifdef __GNUC__
-__inline
-#endif
-static unsigned int hash (const char *, unsigned int);
-#ifdef __GNUC__
-__inline
-#endif
-const char * libc_name_p (const char *, unsigned int);
 /* maximum key range = 391, duplicates = 0 */
 
-#ifdef __GNUC__
-__inline
-#else
-#ifdef __cplusplus
-inline
-#endif
-#endif
-static unsigned int
-hash (register const char *str, register unsigned int len)
+class libc_name
 {
+private:
+  static inline unsigned int hash (const char *str, unsigned int len);
+public:
+  static const char *libc_name_p (const char *str, unsigned int len);
+};
+
+inline unsigned int
+libc_name::hash (register const char *str, register unsigned int len)
+{
   static const unsigned short asso_values[] =
     {
       400, 400, 400, 400, 400, 400, 400, 400, 400, 400,
@@ -122,14 +115,8 @@ along with GCC; see the file COPYING3.  If not see
   return hval + asso_values[(unsigned char)str[len - 1]];
 }
 
-#ifdef __GNUC__
-__inline
-#ifdef __GNUC_STDC_INLINE__
-__attribute__ ((__gnu_inline__))
-#endif
-#endif
 const char *
-libc_name_p (register const char *str, register unsigned int len)
+libc_name::libc_name_p (register const char *str, register unsigned int len)
 {
   enum
     {
Index: gcc-4.9.2/gcc/cp/except.c
===================================================================
--- gcc-4.9.2/gcc/cp/except.c	(revision 233574)
+++ gcc-4.9.2/gcc/cp/except.c	(working copy)
@@ -1030,7 +1030,8 @@ nothrow_libfn_p (const_tree fn)
      unless the system headers are playing rename tricks, and if
      they are, we don't want to be confused by them.  */
   id = DECL_NAME (fn);
-  return !!libc_name_p (IDENTIFIER_POINTER (id), IDENTIFIER_LENGTH (id));
+  return !!libc_name::libc_name_p (IDENTIFIER_POINTER (id),
+				   IDENTIFIER_LENGTH (id));
 }
 
 /* Returns nonzero if an exception of type FROM will be caught by a
{ "pile_set_name": "Github" }
/*
Copyright The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by informer-gen. DO NOT EDIT.

package v1

import (
	"context"
	time "time"

	networkingv1 "k8s.io/api/networking/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	runtime "k8s.io/apimachinery/pkg/runtime"
	watch "k8s.io/apimachinery/pkg/watch"
	internalinterfaces "k8s.io/client-go/informers/internalinterfaces"
	kubernetes "k8s.io/client-go/kubernetes"
	v1 "k8s.io/client-go/listers/networking/v1"
	cache "k8s.io/client-go/tools/cache"
)

// IngressInformer provides access to a shared informer and lister for
// Ingresses.
type IngressInformer interface {
	Informer() cache.SharedIndexInformer
	Lister() v1.IngressLister
}

type ingressInformer struct {
	factory          internalinterfaces.SharedInformerFactory
	tweakListOptions internalinterfaces.TweakListOptionsFunc
	namespace        string
}

// NewIngressInformer constructs a new informer for Ingress type.
// Always prefer using an informer factory to get a shared informer instead of getting an independent
// one. This reduces memory footprint and number of connections to the server.
func NewIngressInformer(client kubernetes.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer {
	return NewFilteredIngressInformer(client, namespace, resyncPeriod, indexers, nil)
}

// NewFilteredIngressInformer constructs a new informer for Ingress type.
// Always prefer using an informer factory to get a shared informer instead of getting an independent
// one. This reduces memory footprint and number of connections to the server.
func NewFilteredIngressInformer(client kubernetes.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer {
	return cache.NewSharedIndexInformer(
		&cache.ListWatch{
			ListFunc: func(options metav1.ListOptions) (runtime.Object, error) {
				if tweakListOptions != nil {
					tweakListOptions(&options)
				}
				return client.NetworkingV1().Ingresses(namespace).List(context.TODO(), options)
			},
			WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
				if tweakListOptions != nil {
					tweakListOptions(&options)
				}
				return client.NetworkingV1().Ingresses(namespace).Watch(context.TODO(), options)
			},
		},
		&networkingv1.Ingress{},
		resyncPeriod,
		indexers,
	)
}

func (f *ingressInformer) defaultInformer(client kubernetes.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer {
	return NewFilteredIngressInformer(client, f.namespace, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions)
}

func (f *ingressInformer) Informer() cache.SharedIndexInformer {
	return f.factory.InformerFor(&networkingv1.Ingress{}, f.defaultInformer)
}

func (f *ingressInformer) Lister() v1.IngressLister {
	return v1.NewIngressLister(f.Informer().GetIndexer())
}
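The generated comments above steer callers toward the shared informer factory rather than calling NewIngressInformer directly. What follows is a minimal, hand-written usage sketch, not part of the generated file: the kubeconfig location, the 30-second resync period, and the "default" namespace are illustrative assumptions.

// Usage sketch (assumptions: local kubeconfig, 30s resync, "default" namespace).
package main

import (
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/labels"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a clientset from the default kubeconfig location; this assumes
	// the example runs outside the cluster.
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(config)

	// One factory per client: every informer requested from it shares a
	// single watch connection and cache per resource type.
	factory := informers.NewSharedInformerFactory(client, 30*time.Second)
	ingresses := factory.Networking().V1().Ingresses()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)

	// Read from the lister only after the shared cache has synced.
	if !cache.WaitForCacheSync(stop, ingresses.Informer().HasSynced) {
		panic("ingress informer cache did not sync")
	}

	list, err := ingresses.Lister().Ingresses("default").List(labels.Everything())
	if err != nil {
		panic(err)
	}
	fmt.Printf("found %d ingresses in the default namespace\n", len(list))
}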
{ "pile_set_name": "Github" }
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../insert-module-globals/bin/cmd.js" "$@"
  ret=$?
else
  node "$basedir/../insert-module-globals/bin/cmd.js" "$@"
  ret=$?
fi
exit $ret
{ "pile_set_name": "Github" }
<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <PropertyGroup> <TargetFrameworks>netstandard2.0;net472</TargetFrameworks> <DefineConstants>$(DefineConstants);NO_DOTNETCORE_BOOTSTRAP</DefineConstants> <AssemblyName>Fake.Core.Xml</AssemblyName> <OutputType>Library</OutputType> </PropertyGroup> <PropertyGroup> <DefineConstants>$(DefineConstants);NETSTANDARD;USE_HTTPCLIENT</DefineConstants> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)' == 'Release' "> <DefineConstants>$(DefineConstants);RELEASE</DefineConstants> </PropertyGroup> <ItemGroup> <Compile Include="AssemblyInfo.fs" /> <Compile Include="Xml.fs" /> </ItemGroup> <ItemGroup> <ProjectReference Include="..\Fake.Core.String\Fake.Core.String.fsproj" /> </ItemGroup> <Import Project="..\..\..\.paket\Paket.Restore.targets" /> </Project>
{ "pile_set_name": "Github" }