---
base_model: distilbert/distilbert-base-uncased-finetuned-sst-2-english
library_name: sentence-transformers
metrics:
- pearson_cosine
- spearman_cosine
- pearson_manhattan
- spearman_manhattan
- pearson_euclidean
- spearman_euclidean
- pearson_dot
- spearman_dot
- pearson_max
- spearman_max
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:302
- loss:CosineSimilarityLoss
widget:
- source_sentence: |-
interface Input {
id: number;
title: string;
parent_id: number | null;
}
interface Output extends Input {
children?: Output[];
}
function doJob(inputItems: Input[], parent_id?: number) {
const outputItems: Output[] = [];
for (let i = 0; i < inputItems.length; i++) {
const children = doJob(inputItems.slice(i, inputItems.length), inputItems[i].parent_id)
.filter(i => i.parent_id === parent_id);
outputItems.push({...item, children});
}
return outputItems;
}
sentences:
- |
interface Task {
id: number;
title: string;
parent_id: number | null;
children?: Task[];
}
function buildTaskTree(tasks: Task[]): Task[] {
const tasksMap = tasks.reduce((acc, task) => {
acc[task.id] = { ...task, children: [] };
return acc;
}, {} as { [key: number]: Task });
const rootTasks: Task[] = [];
tasks.forEach(task => {
const { id, parent_id } = task;
if (parent_id === null) {
rootTasks.push(tasksMap[id]);
} else {
if (tasksMap[parent_id]) {
tasksMap[parent_id].children.push(tasksMap[id]);
}
}
});
return rootTasks;
}
// Test the function with the provided example
const inputTasks: Task[] = [
{ id: 1, title: 'Task 1', parent_id: null },
{ id: 2, title: 'Task 2', parent_id: 1 },
{ id: 3, title: 'Task 3', parent_id: 1 }
];
const outputTasks: Task[] = buildTaskTree(inputTasks);
console.log(outputTasks);
- |
const http = require('http');
async function checkUrlsStatus(urls) {
const statusObj = {};
const getStatus = async (url) => {
return new Promise((resolve) => {
http.get(url, (res) => {
resolve(res.statusCode);
}).on('error', (error) => {
resolve(500); // Internal Server Error
});
});
};
await Promise.all(urls.map(async (url) => {
const status = await getStatus(url);
statusObj[url] = status;
}));
return statusObj;
}
// Example
const urls = ['https://example.com', 'https://google.com'];
checkUrlsStatus(urls)
.then((result) => {
console.log(result);
})
.catch((error) => {
console.error(error);
});
module.exports = checkUrlsStatus;
- >
def find_longest_word(words):
max_length = 0
longest_word = ''
for word in words:
if len(word) > max_length:
max_length = len(word)
longest_word = word
return longest_word, max_length
# Test cases
print(find_longest_word(['hello', 'world', 'python', 'programming'])) #
Output: ('programming', 11)
print(find_longest_word(['short', 'longer', 'longest', 'size'])) #
Output: ('longest', 7)
- source_sentence: |-
// inventory.module.ts
import { Module } from '@nestjs/common';
import { InventoryService } from './inventory.service';
import { InventoryController } from './inventory.controller';
import { TypeOrmModule } from '@nestjs/typeorm';
import { Product } from './product.entity';
@Module({
imports: [TypeOrmModule.forFeature([Product])],
providers: [InventoryService],
controllers: [InventoryController],
})
export class InventoryModule {}
// inventory.service.ts
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Product } from './product.entity';
import { CreateProductDto, UpdateProductDto } from './product.dto';
@Injectable()
export class InventoryService {
constructor(
@InjectRepository(Product)
private readonly productRepository: Repository<Product>,
) {}
async createProduct(createProductDto: CreateProductDto): Promise<Product> {
const newProduct = new Product();
newProduct.name = createProductDto.name;
newProduct.description = createProductDto.description;
newProduct.price = createProductDto.price;
newProduct.availableQuantity = createProductDto.availableQuantity;
return await this.productRepository.save(newProduct);
}
async updateProduct(
productId: number,
updateProductDto: UpdateProductDto,
): Promise<Product> {
const product = await this.productRepository.findOne(productId);
if (!product) {
throw new NotFoundException('Product not found');
}
product.name = updateProductDto.name || product.name;
product.description = updateProductDto.description || product.description;
product.price = updateProductDto.price || product.price;
product.availableQuantity =
updateProductDto.availableQuantity || product.availableQuantity;
return await this.productRepository.save(product);
}
async findAllProducts(): Promise<Product[]> {
return await this.productRepository.find();
}
async getProductById(productId: number): Promise<Product> {
const product = await this.productRepository.findOne(productId);
if (!product) {
throw new NotFoundException('Product not found');
}
return product;
}
async checkProductAvailability(productId: number, quantity: number): Promise<boolean> {
const product = await this.productRepository.findOne(productId);
if (!product) {
throw new NotFoundException('Product not found');
}
return product.availableQuantity >= quantity;
}
}
sentences:
- >
// inventory.dto.ts
import { IsInt, IsNotEmpty, IsNumber, IsString, Min } from
'class-validator';
export class ProductDto {
@IsString()
@IsNotEmpty()
id: string;
@IsString()
@IsNotEmpty()
name: string;
@IsString()
description: string;
@IsNumber()
@IsNotEmpty()
price: number;
@IsInt()
@Min(0)
@IsNotEmpty()
availableQuantity: number;
}
// inventory.interface.ts
export interface Product {
id: string;
name: string;
description: string;
price: number;
availableQuantity: number;
}
// inventory.module.ts
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { InventoryController } from './inventory.controller';
import { InventoryService } from './inventory.service';
import { Product } from './product.entity';
@Module({
imports: [TypeOrmModule.forFeature([Product])],
controllers: [InventoryController],
providers: [InventoryService]
})
export class InventoryModule {}
// product.entity.ts
import { Entity, Column, PrimaryGeneratedColumn } from 'typeorm';
@Entity()
export class Product {
@PrimaryGeneratedColumn()
id: number;
@Column()
name: string;
@Column()
description: string;
@Column('decimal')
price: number;
@Column()
availableQuantity: number;
}
// inventory.controller.ts
import { Controller, Get, Post, Put, Body, Param } from
'@nestjs/common';
import { InventoryService } from './inventory.service';
import { ProductDto } from './inventory.dto';
@Controller('inventory')
export class InventoryController {
constructor(private readonly inventoryService: InventoryService) {}
@Post('add-product')
async addProduct(@Body() productDto: ProductDto) {
return this.inventoryService.addProduct(productDto);
}
@Get('products')
async getProducts() {
return this.inventoryService.getProducts();
}
@Put('update-quantity/:id')
async updateQuantity(@Param('id') id: string, @Body('quantity') quantity: number) {
return this.inventoryService.updateQuantity(id, quantity);
}
}
// inventory.service.ts
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { Product } from './product.entity';
import { ProductDto } from './inventory.dto';
@Injectable()
export class InventoryService {
constructor(
@InjectRepository(Product)
private productRepository: Repository<Product>,
) {}
async addProduct(productDto: ProductDto): Promise<Product> {
const newProduct = this.productRepository.create(productDto);
return this.productRepository.save(newProduct);
}
async getProducts(): Promise<Product[]> {
return this.productRepository.find();
}
async updateQuantity(id: string, quantity: number): Promise<Product> {
const product = await this.productRepository.findOne(id);
if (!product) {
throw new Error('Product not found');
}
product.availableQuantity = quantity;
return this.productRepository.save(product);
}
}
- |
def move_zeros_to_end(lst):
zero_count = 0
for i in range(len(lst)):
if lst[i] != 0:
lst[i], lst[zero_count] = lst[zero_count], lst[i]
zero_count += 1
# Test cases
lst1 = [0, 1, 0, 3, 12]
move_zeros_to_end(lst1)
print(lst1) # Output: [1, 3, 12, 0, 0]
lst2 = [0, 0, 1]
move_zeros_to_end(lst2)
print(lst2) # Output: [1, 0, 0]
- >
// inventory.dto.ts
import { IsInt, IsNotEmpty, IsNumber, IsString, Min } from
'class-validator';
export class ProductDto {
@IsString()
@IsNotEmpty()
id: string;
@IsString()
@IsNotEmpty()
name: string;
@IsString()
description: string;
@IsNumber()
@IsNotEmpty()
price: number;
@IsInt()
@Min(0)
@IsNotEmpty()
availableQuantity: number;
}
// inventory.interface.ts
export interface Product {
id: string;
name: string;
description: string;
price: number;
availableQuantity: number;
}
// inventory.module.ts
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { InventoryController } from './inventory.controller';
import { InventoryService } from './inventory.service';
import { Product } from './product.entity';
@Module({
imports: [TypeOrmModule.forFeature([Product])],
controllers: [InventoryController],
providers: [InventoryService]
})
export class InventoryModule {}
// product.entity.ts
import { Entity, Column, PrimaryGeneratedColumn } from 'typeorm';
@Entity()
export class Product {
@PrimaryGeneratedColumn()
id: number;
@Column()
name: string;
@Column()
description: string;
@Column('decimal')
price: number;
@Column()
availableQuantity: number;
}
// inventory.controller.ts
import { Controller, Get, Post, Put, Body, Param } from
'@nestjs/common';
import { InventoryService } from './inventory.service';
import { ProductDto } from './inventory.dto';
@Controller('inventory')
export class InventoryController {
constructor(private readonly inventoryService: InventoryService) {}
@Post('add-product')
async addProduct(@Body() productDto: ProductDto) {
return this.inventoryService.addProduct(productDto);
}
@Get('products')
async getProducts() {
return this.inventoryService.getProducts();
}
@Put('update-quantity/:id')
async updateQuantity(@Param('id') id: string, @Body('quantity') quantity: number) {
return this.inventoryService.updateQuantity(id, quantity);
}
}
// inventory.service.ts
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { Product } from './product.entity';
import { ProductDto } from './inventory.dto';
@Injectable()
export class InventoryService {
constructor(
@InjectRepository(Product)
private productRepository: Repository<Product>,
) {}
async addProduct(productDto: ProductDto): Promise<Product> {
const newProduct = this.productRepository.create(productDto);
return this.productRepository.save(newProduct);
}
async getProducts(): Promise<Product[]> {
return this.productRepository.find();
}
async updateQuantity(id: string, quantity: number): Promise<Product> {
const product = await this.productRepository.findOne(id);
if (!product) {
throw new Error('Product not found');
}
product.availableQuantity = quantity;
return this.productRepository.save(product);
}
}
- source_sentence: |-
// wage-input.dto.ts
import { IsNumber, IsPositive } from 'class-validator';
export class WageInputDto {
@IsNumber()
@IsPositive()
hourlyWage: number;
@IsNumber()
@IsPositive()
hoursWorked: number;
}
// It will handle the input validation too.
// employee.controller.ts
import { Body, Controller, Post } from '@nestjs/common';
import { WageInputDto } from './dto/wage-input.dto';
import { EmployeeService } from './employee.service';
@Controller('employee')
export class EmployeeController {
constructor(private readonly employeeService: EmployeeService) {}
@Post('/wage')
async getWage(@Body() input: WageInputDto) {
return this.employeeService.getWage(input);
}
}
// employee.service.ts
import { Injectable } from '@nestjs/common';
import { WageInputDto } from './dto/wage-input.dto';
const WEEKLY_HOURS = 40;
@Injectable()
export class EmployeeService {
async getWage(input: WageInputDto) {
let weeklyHours = 0;
let overTimeHours = 0;
let weeklyWage = 0;
const hasDoneOverTime = input.hoursWorked > WEEKLY_HOURS;
if (hasDoneOverTime) {
weeklyHours = WEEKLY_HOURS;
overTimeHours = input.hoursWorked - WEEKLY_HOURS;
} else {
weeklyHours = input.hoursWorked;
}
weeklyWage = weeklyHours * input.hourlyWage;
if (hasDoneOverTime) {
weeklyWage = weeklyWage + overTimeHours * (input.hourlyWage * 1.5);
}
return { weeklyWage };
}
}
sentences:
- >
import { Controller, Post, Body, HttpException, HttpStatus } from
'@nestjs/common';
interface WeeklyWageInput {
hourlyWage: number;
hoursWorked: number;
}
@Controller('calculate-weekly-wage')
export class WeeklyWageController {
@Post()
calculateWeeklyWage(@Body() data: WeeklyWageInput): { weeklyWage: number } {
// Input validation
if (data.hourlyWage <= 0 || data.hoursWorked <= 0 || !Number.isInteger(data.hoursWorked)) {
throw new HttpException('Invalid input. Hourly wage must be positive and hours worked must be a positive integer', HttpStatus.BAD_REQUEST);
}
const regularHours = Math.min(data.hoursWorked, 40);
const overtimeHours = Math.max(data.hoursWorked - 40, 0);
const weeklyWage = (regularHours * data.hourlyWage) + (overtimeHours * (1.5 * data.hourlyWage));
return { weeklyWage };
}
}
- |
import { Pipe, PipeTransform } from '@angular/core';
@Pipe({
name: 'orderBy'
})
export class OrderByPipe implements PipeTransform {
transform(array: any[], key: string, order: 'asc' | 'desc'): any[] {
if (!Array.isArray(array) || !key || (order !== 'asc' && order !== 'desc')) {
console.error('Invalid input data');
return array;
}
const compareFn = (a: any, b: any): number => {
if (a[key] < b[key]) {
return order === 'asc' ? -1 : 1;
}
if (a[key] > b[key]) {
return order === 'asc' ? 1 : -1;
}
return 0;
};
return array.slice().sort(compareFn);
}
}
- |
public class PalindromeChecker {
public static boolean isPalindrome(String str) {
str = str.toLowerCase().replaceAll("[^a-zA-Z0-9]", "");
int left = 0;
int right = str.length() - 1;
while (left < right) {
if (str.charAt(left) != str.charAt(right)) {
return false;
}
left++;
right--;
}
return true;
}
public static void main(String[] args) {
String input1 = "A man, a plan, a canal: Panama";
String input2 = "race a car";
System.out.println("Input: '" + input1 + "' Output: " + isPalindrome(input1));
System.out.println("Input: '" + input2 + "' Output: " + isPalindrome(input2));
}
}
- source_sentence: |-
FROM python:3.8
WORKDIR /app
COPY helloworld.py .
RUN pip install --no-cache-dir -r requirements.txt
CMD ["python", "helloworld.py"]
## PYTHON PROGRAM
helloworld.py
print("Hello, World!")
## BUILD COMMAND
docker build -t "python:helloworld" .
docker run -itd --name python python:helloworld
sentences:
- |
# Use a slim Python base image for optimization
FROM python:3.9-slim
# Set the working directory inside the container
WORKDIR /app
# Copy the Python script into the container
COPY hello.py /app/hello.py
# Define the command to run the Python script
CMD ["python", "/app/hello.py"]
- |
import java.util.HashMap;
public class Solution {
public int[] twoSum(int[] nums, int target) {
HashMap<Integer, Integer> map = new HashMap<>();
for (int i = 0; i < nums.length; i++) {
int complement = target - nums[i];
if (map.containsKey(complement)) {
return new int[]{map.get(complement), i};
}
map.put(nums[i], i);
}
return new int[]{};
}
}
// Example
int[] array = new int[]{2, 7, 11, 15};
int target = 9;
Solution solution = new Solution();
int[] result = solution.twoSum(array, target);
- |
function stripHtmlTags(input) {
if (!input) return '';
const tagRegex = /<[^>]*>/g;
return input.replace(tagRegex, '');
}
- source_sentence: >-
def move_zeroes(nums):
count = 0
for i in range(len(nums)):
if nums[i] != 0:
nums[count], nums[i]= nums[i], nums[count]
count += 1
for i in range(count, len(nums)):
nums[i] =0
input = [int(x) for x in input("Enter integers separated by spaces:
").split()]
move_zeroes(input)
print(input)
sentences:
- |
import 'package:flutter/material.dart';
import 'package:firebase_core/firebase_core.dart';
import 'package:firebase_auth/firebase_auth.dart';
import 'package:firebase_database/firebase_database.dart';
void main() async {
WidgetsFlutterBinding.ensureInitialized();
await Firebase.initializeApp();
runApp(MyApp());
}
class MyApp extends StatelessWidget {
final databaseRef = FirebaseDatabase.instance.reference().child('messages');
@override
Widget build(BuildContext context) {
return MaterialApp(
home: Scaffold(
appBar: AppBar(
title: Text('Real-Time Messages'),
),
body: MessagesList(databaseRef: databaseRef),
floatingActionButton: AddMessageButton(databaseRef: databaseRef),
),
);
}
}
class MessagesList extends StatelessWidget {
final DatabaseReference databaseRef;
MessagesList({required this.databaseRef});
@override
Widget build(BuildContext context) {
return StreamBuilder(
stream: databaseRef.orderByChild('timestamp').onValue,
builder: (context, snapshot) {
if (snapshot.hasError) {
return Text('Error: ${snapshot.error}');
}
if (!snapshot.hasData) {
return Center(child: CircularProgressIndicator());
}
List<Message> messages = [];
snapshot.data!.snapshot.value.forEach((key, value) {
messages.add(Message.fromMap(value));
});
messages.sort((a, b) => a.timestamp.compareTo(b.timestamp));
return ListView.builder(
itemCount: messages.length,
itemBuilder: (context, index) {
return ListTile(
title: Text(messages[index].text),
);
},
);
},
);
}
}
class AddMessageButton extends StatelessWidget {
final DatabaseReference databaseRef;
AddMessageButton({required this.databaseRef});
@override
Widget build(BuildContext context) {
return FloatingActionButton(
onPressed: () {
databaseRef.push().set({
'text': 'New Message',
'timestamp': DateTime.now().millisecondsSinceEpoch
});
},
child: Icon(Icons.add),
);
}
}
class Message {
final String text;
final int timestamp;
Message({required this.text, required this.timestamp});
factory Message.fromMap(Map<dynamic, dynamic> map) {
return Message(
text: map['text'],
timestamp: map['timestamp'],
);
}
}
- |
using System;
using System.Collections.Generic;
class BracketChecker
{
private readonly Dictionary<char, char> bracketPairs = new Dictionary<char, char>
{
{ '(', ')' },
{ '[', ']' },
{ '{', '}' }
};
public bool CheckBalancedBrackets(string input)
{
if (string.IsNullOrEmpty(input))
{
return true;
}
Stack<char> stack = new Stack<char>();
foreach (char c in input)
{
if (bracketPairs.ContainsValue(c))
{
if (stack.Count == 0 || bracketPairs[stack.Peek()] != c)
{
return false;
}
stack.Pop();
}
else if (bracketPairs.ContainsKey(c))
{
stack.Push(c);
}
}
return stack.Count == 0;
}
}
class Program
{
static void Main()
{
BracketChecker bracketChecker = new BracketChecker();
string input1 = "(a+[b*c]-{d/e})";
Console.WriteLine("Input: \"{0}\"", input1);
Console.WriteLine("Output: {0}\n", bracketChecker.CheckBalancedBrackets(input1));
string input2 = "(a+[b*c)-{d/e}]";
Console.WriteLine("Input: \"{0}\"", input2);
Console.WriteLine("Output: {0}", bracketChecker.CheckBalancedBrackets(input2));
}
}
- |
def move_zeros_to_end(lst):
zero_count = 0
for i in range(len(lst)):
if lst[i] != 0:
lst[i], lst[zero_count] = lst[zero_count], lst[i]
zero_count += 1
# Test cases
lst1 = [0, 1, 0, 3, 12]
move_zeros_to_end(lst1)
print(lst1) # Output: [1, 3, 12, 0, 0]
lst2 = [0, 0, 1]
move_zeros_to_end(lst2)
print(lst2) # Output: [1, 0, 0]
model-index:
- name: >-
SentenceTransformer based on
distilbert/distilbert-base-uncased-finetuned-sst-2-english
results:
- task:
type: semantic-similarity
name: Semantic Similarity
dataset:
name: Unknown
type: unknown
metrics:
- type: pearson_cosine
value: 0.9000341656513303
name: Pearson Cosine
- type: spearman_cosine
value: 0.9013693287916293
name: Spearman Cosine
- type: pearson_manhattan
value: 0.8619949591168187
name: Pearson Manhattan
- type: spearman_manhattan
value: 0.8020438201628594
name: Spearman Manhattan
- type: pearson_euclidean
value: 0.868483180326987
name: Pearson Euclidean
- type: spearman_euclidean
value: 0.8234464507775442
name: Spearman Euclidean
- type: pearson_dot
value: 0.8494699061913786
name: Pearson Dot
- type: spearman_dot
value: 0.8947516297094024
name: Spearman Dot
- type: pearson_max
value: 0.9000341656513303
name: Pearson Max
- type: spearman_max
value: 0.9013693287916293
name: Spearman Max
---

# SentenceTransformer based on distilbert/distilbert-base-uncased-finetuned-sst-2-english

This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [distilbert/distilbert-base-uncased-finetuned-sst-2-english](https://huggingface.co/distilbert/distilbert-base-uncased-finetuned-sst-2-english). It maps sentences and paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
## Model Details

### Model Description

- **Model Type:** Sentence Transformer
- **Base model:** [distilbert/distilbert-base-uncased-finetuned-sst-2-english](https://huggingface.co/distilbert/distilbert-base-uncased-finetuned-sst-2-english)
- **Maximum Sequence Length:** 512 tokens
- **Output Dimensionality:** 768 dimensions
- **Similarity Function:** Cosine Similarity
### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: DistilBertModel
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
)
```
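
For illustration, the Transformer → mean-pooling stack above can be reproduced with plain 🤗 Transformers. This is a minimal sketch, assuming the underlying DistilBERT weights and tokenizer load directly with `AutoModel`/`AutoTokenizer` (the usual layout for single-module Sentence Transformers repositories); for normal use, prefer the `SentenceTransformer` API shown in the Usage section below.

```python
import torch
from transformers import AutoTokenizer, AutoModel

# Assumption: the repo id from the Usage section exposes the DistilBERT weights
# in the standard Transformers layout.
repo_id = "wasabibish/similarity-code-ai-generated"
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModel.from_pretrained(repo_id)

def mean_pool(last_hidden_state, attention_mask):
    # Average the token embeddings, ignoring padding (pooling_mode_mean_tokens).
    mask = attention_mask.unsqueeze(-1).expand(last_hidden_state.size()).float()
    return (last_hidden_state * mask).sum(dim=1) / mask.sum(dim=1).clamp(min=1e-9)

encoded = tokenizer(
    ["def add(a, b): return a + b"],
    padding=True, truncation=True, max_length=512, return_tensors="pt",
)
with torch.no_grad():
    output = model(**encoded)
embedding = mean_pool(output.last_hidden_state, encoded["attention_mask"])
print(embedding.shape)  # torch.Size([1, 768])
```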
## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.

```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("wasabibish/similarity-code-ai-generated")
# Run inference
sentences = [
'def move_zeroes(nums):\n count = 0\n for i in range(len(nums)):\n if nums[i] != 0:\n nums[count], nums[i]= nums[i], nums[count]\n count += 1\n for i in range(count, len(nums)):\n nums[i] =0\n\ninput = [int(x) for x in input("Enter integers separated by spaces: ").split()]\nmove_zeroes(input)\n\nprint(input)',
'def move_zeros_to_end(lst):\n zero_count = 0\n for i in range(len(lst)):\n if lst[i] != 0:\n lst[i], lst[zero_count] = lst[zero_count], lst[i]\n zero_count += 1\n\n# Test cases\nlst1 = [0, 1, 0, 3, 12]\nmove_zeros_to_end(lst1)\nprint(lst1) # Output: [1, 3, 12, 0, 0]\n\nlst2 = [0, 0, 1]\nmove_zeros_to_end(lst2)\nprint(lst2) # Output: [1, 0, 0]\n',
'using System;\nusing System.Collections.Generic;\n\nclass BracketChecker\n{\n private readonly Dictionary<char, char> bracketPairs = new Dictionary<char, char>\n {\n { \'(\', \')\' },\n { \'[\', \']\' },\n { \'{\', \'}\' }\n };\n\n public bool CheckBalancedBrackets(string input)\n {\n if (string.IsNullOrEmpty(input))\n {\n return true;\n }\n\n Stack<char> stack = new Stack<char>();\n\n foreach (char c in input)\n {\n if (bracketPairs.ContainsValue(c))\n {\n if (stack.Count == 0 || bracketPairs[stack.Peek()] != c)\n {\n return false;\n }\n stack.Pop();\n }\n else if (bracketPairs.ContainsKey(c))\n {\n stack.Push(c);\n }\n }\n\n return stack.Count == 0;\n }\n}\n\nclass Program\n{\n static void Main()\n {\n BracketChecker bracketChecker = new BracketChecker();\n\n string input1 = "(a+[b*c]-{d/e})";\n Console.WriteLine("Input: \\"{0}\\"", input1);\n Console.WriteLine("Output: {0}\\n", bracketChecker.CheckBalancedBrackets(input1));\n\n string input2 = "(a+[b*c)-{d/e}]";\n Console.WriteLine("Input: \\"{0}\\"", input2);\n Console.WriteLine("Output: {0}", bracketChecker.CheckBalancedBrackets(input2));\n }\n}\n',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```
## Evaluation

### Metrics

#### Semantic Similarity

* Evaluated with `EmbeddingSimilarityEvaluator`

| Metric             | Value  |
|:-------------------|:-------|
| pearson_cosine     | 0.9    |
| spearman_cosine    | 0.9014 |
| pearson_manhattan  | 0.862  |
| spearman_manhattan | 0.802  |
| pearson_euclidean  | 0.8685 |
| spearman_euclidean | 0.8234 |
| pearson_dot        | 0.8495 |
| spearman_dot       | 0.8948 |
| pearson_max        | 0.9    |
| spearman_max       | 0.9014 |
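
The table above is produced by `EmbeddingSimilarityEvaluator`, which embeds both sides of each labelled pair and correlates the model's similarity scores with the gold scores. A minimal sketch of running the same evaluator on your own pairs (the pairs and scores below are made up for illustration; the actual evaluation split is not published with this card):

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator

model = SentenceTransformer("wasabibish/similarity-code-ai-generated")

# Hypothetical labelled pairs; replace with your own (sentence1, sentence2, score) data.
evaluator = EmbeddingSimilarityEvaluator(
    sentences1=[
        "def add(a, b): return a + b",
        "print('hello')",
        "for i in range(10): print(i)",
    ],
    sentences2=[
        "def sum_two(x, y): return x + y",
        "SELECT * FROM users;",
        "while i < 10: i += 1",
    ],
    scores=[0.9, 0.1, 0.4],
    name="example-eval",
)
results = evaluator(model)
print(results)  # Pearson/Spearman for cosine, Euclidean, Manhattan, and dot similarities
```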
## Training Details

### Training Dataset

#### Unnamed Dataset

* Size: 302 training samples
* Columns: `sentence1`, `sentence2`, and `score`
* Approximate statistics based on the first 302 samples:

  |         | sentence1                                           | sentence2                                           | score                          |
  |:--------|:----------------------------------------------------|:----------------------------------------------------|:-------------------------------|
  | type    | string                                              | string                                              | float                          |
  | details | min: 3 tokens, mean: 206.43 tokens, max: 512 tokens | min: 27 tokens, mean: 244.9 tokens, max: 512 tokens | min: 0.0, mean: 0.29, max: 0.9 |
* Samples:

  | sentence1 | sentence2 | score |
  |:----------|:----------|:------|
  | <code>from django.views.generic import ListView<br>class PersonListView(ListView):<br>    model = Person<br>    template_name = 'person_list.html'<br>    def get_queryset(self):<br>        return Person.objects.filter(birthdate__year__lte=2005)</code> | <code>from myapp.models import Customer  # Import the Customer model from your Django app<br>def get_customers_with_zip_code_starting_with_123():<br>    customers = Customer.objects.filter(zip_code__startswith='123').values()  # Query to filter customers with zip_code starting with '123'<br>    return list(customers)  # Return a list of dictionaries for matching records</code> | <code>0.4</code> |
  | <code>Welcome to our website!</code> | <code>function createSentence(words, maxChars) {<br>    if (words.length === 0</code> | |
  | <code>AAAAAA</code> | <code>#include<br>#include<br>class KMP {<br>public:<br>    std::vector findPatternIndices(const CString& text, const CString& pattern) {<br>        std::vector indices;<br>        if (pattern.IsEmpty()</code> | |
* Loss: `CosineSimilarityLoss` with these parameters: `{ "loss_fct": "torch.nn.modules.loss.MSELoss" }`
### Evaluation Dataset

#### Unnamed Dataset

* Size: 76 evaluation samples
* Columns: `sentence1`, `sentence2`, and `score`
* Approximate statistics based on the first 76 samples:

  |         | sentence1                                           | sentence2                                            | score                          |
  |:--------|:----------------------------------------------------|:-----------------------------------------------------|:-------------------------------|
  | type    | string                                              | string                                               | float                          |
  | details | min: 5 tokens, mean: 216.92 tokens, max: 512 tokens | min: 54 tokens, mean: 254.78 tokens, max: 512 tokens | min: 0.0, mean: 0.33, max: 0.9 |
* Samples:

  | sentence1 | sentence2 | score |
  |:----------|:----------|:------|
  | <code>function stripHtmlTags(str) {<br>    return str.replace(/<[^>]*>/g, '');<br>}<br>const input = 'Hello World!<br>';<br>const output = stripHtmlTags(input);<br>console.log(output);</code> | <code>function stripHtmlTags(input) {<br>    if (!input) return '';<br>    const tagRegex = /<[^>]*>/g;<br>    return input.replace(tagRegex, '');<br>}</code> | <code>0.6</code> |
  | <code>function getTopThreeWords($text) {<br>    // Remove punctuation and convert to lowercase<br>    $words = str_word_count(strtolower(preg_replace('/[^\p{L}\p{N}\s]/u', ' ', $text)), 1);<br>    // Count the frequency of each word<br>    $wordFrequency = array_count_values($words);<br>    // Sort the words by frequency in descending order<br>    arsort($wordFrequency);<br>    // Get the top three words<br>    $topThreeWords = array_slice($wordFrequency, 0, 3, true);<br>    // Format the output<br>    $output = [];<br>    foreach ($topThreeWords as $word => $count) {<br>        $output[] = "('$word', $count)";<br>    }<br>    return '[' . implode(', ', $output) . ']';<br>}<br>// Example usage:<br>$inputText = "The quick brown fox jumps over the lazy dog. The dog was lazy!";<br>echo getTopThreeWords($inputText);<br>?></code> | <code>function countTopWords($inputString) {<br>    // Convert the input string to lowercase and remove punctuation<br>    $cleanString = preg_replace("/[\W_]+/", " ", strtolower($inputString));<br>    // Split the string into an array of words<br>    $words = explode(" ", $cleanString);<br>    // Count the frequency of each word<br>    $wordCount = array_count_values($words);<br>    // Sort the words by frequency in descending order<br>    arsort($wordCount);<br>    // Get the top three most common words<br>    $topWords = array_slice($wordCount, 0, 3);<br>    // Format the output as an array of tuples<br>    $output = [];<br>    foreach ($topWords as $word => $count) {<br>        $output[] = [$word, $count];<br>    }<br>    return $output;<br>}<br>// Test the function with the example input<br>$inputString = "The quick brown fox jumps over the lazy dog. The dog was lazy!";<br>$output = countTopWords($inputString);<br>print_r($output);<br>?></code> | <code>0.3</code> |
  | <code>AAAAAA</code> | <code>#include<br>#include<br>class KMP {<br>public:<br>    std::vector findPatternIndices(const CString& text, const CString& pattern) {<br>        std::vector indices;<br>        if (pattern.IsEmpty()</code> | |
* Loss: `CosineSimilarityLoss` with these parameters: `{ "loss_fct": "torch.nn.modules.loss.MSELoss" }`
### Training Hyperparameters

#### Non-Default Hyperparameters

- `eval_strategy`: steps
- `weight_decay`: 0.2
- `max_steps`: 100
- `warmup_steps`: 150
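
As a sketch, these non-default values map onto `SentenceTransformerTrainingArguments` as shown below; `output_dir` is a placeholder (it is not stated in the card), and every argument not listed keeps its default from the full list that follows.

```python
from sentence_transformers import SentenceTransformerTrainingArguments

args = SentenceTransformerTrainingArguments(
    output_dir="output",     # placeholder; not specified in this card
    eval_strategy="steps",
    weight_decay=0.2,
    max_steps=100,
    warmup_steps=150,
)
```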
#### All Hyperparameters

<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: steps
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 8
- `per_device_eval_batch_size`: 8
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
- `learning_rate`: 5e-05
- `weight_decay`: 0.2
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 3.0
- `max_steps`: 100
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.0
- `warmup_steps`: 150
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: False
- `fp16`: False
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: False
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`: 
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `dispatch_batches`: None
- `split_batches`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `eval_use_gather_object`: False
- `batch_sampler`: batch_sampler
- `multi_dataset_batch_sampler`: proportional

</details>
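
Putting the pieces together, a hedged sketch of how such a run is typically wired up with `SentenceTransformerTrainer`; the toy dataset below only mimics the column layout described above (the real 302-pair training split is not published with the card).

```python
from datasets import Dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
    losses,
)

model = SentenceTransformer("distilbert/distilbert-base-uncased-finetuned-sst-2-english")

# Toy stand-in with the same columns as the unnamed training dataset above.
train_dataset = Dataset.from_dict({
    "sentence1": ["def add(a, b): return a + b", "print('hi')"],
    "sentence2": ["def sum_two(x, y): return x + y", "SELECT 1;"],
    "score": [0.9, 0.1],
})

args = SentenceTransformerTrainingArguments(
    output_dir="output",  # placeholder; see the non-default hyperparameters sketched earlier
    max_steps=10,         # kept tiny for the sketch (the card used 100)
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=train_dataset,
    loss=losses.CosineSimilarityLoss(model),
)
trainer.train()
```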
### Training Logs

| Epoch  | Step | loss   | spearman_max |
|:------:|:----:|:------:|:------------:|
| 0.5263 | 20   | 0.3765 | 0.5421       |
| 1.0526 | 40   | 0.1518 | 0.5774       |
| 1.5789 | 60   | 0.0501 | 0.8533       |
| 2.1053 | 80   | 0.0217 | 0.8900       |
| 2.6316 | 100  | 0.0168 | 0.9014       |
### Framework Versions

- Python: 3.9.10
- Sentence Transformers: 3.1.0
- Transformers: 4.44.2
- PyTorch: 2.4.1+cpu
- Accelerate: 0.34.2
- Datasets: 3.0.0
- Tokenizers: 0.19.1
## Citation

### BibTeX

#### Sentence Transformers

```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```