Merge branch 'master' into length-modification
BACKERS.md (16 changed lines)

@@ -8,21 +8,7 @@

## `O(n²)` Backers

<table>
  <tr>
    <td align="center">
      <a href="https://github.com/newrelic">
        <img
          src="https://avatars.githubusercontent.com/u/31739?s=200&v=4"
          width="50"
          height="50"
        />
      </a>
      <br />
      <a href="https://github.com/newrelic">newrelic</a>
    </td>
  </tr>
</table>

## `O(n×log(n))` Backers
README.md (18 changed lines)

@@ -9,6 +9,7 @@

[![CI](https://github.com/trekhleb/javascript-algorithms/workflows/CI/badge.svg)](https://github.com/trekhleb/javascript-algorithms/actions?query=workflow%3ACI+branch%3Amaster)
[![codecov](https://codecov.io/gh/trekhleb/javascript-algorithms/branch/master/graph/badge.svg)](https://codecov.io/gh/trekhleb/javascript-algorithms)
![repo size](https://img.shields.io/github/repo-size/trekhleb/javascript-algorithms.svg)

This repository contains JavaScript based examples of many
popular algorithms and data structures.

@@ -133,6 +134,7 @@ a set of rules that precisely define a sequence of operations.

  * `B` [Shellsort](src/algorithms/sorting/shell-sort)
  * `B` [Counting Sort](src/algorithms/sorting/counting-sort)
  * `B` [Radix Sort](src/algorithms/sorting/radix-sort)
  * `B` [Bucket Sort](src/algorithms/sorting/bucket-sort)
* **Linked Lists**
  * `B` [Straight Traversal](src/algorithms/linked-list/traversal)
  * `B` [Reverse Traversal](src/algorithms/linked-list/reverse-traversal)

@@ -363,22 +365,6 @@ Below is the list of some of the most used Big O notations and their performance

[Folks who are backing this project](https://github.com/trekhleb/javascript-algorithms/blob/master/BACKERS.md) `∑ = 1`

<table>
  <tr>
    <td align="center">
      <a href="https://github.com/newrelic">
        <img
          src="https://avatars.githubusercontent.com/u/31739?s=200&v=4"
          width="50"
          height="50"
        />
      </a>
      <br />
      <a href="https://github.com/newrelic">newrelic</a>
    </td>
  </tr>
</table>

## Author

[@trekhleb](https://trekhleb.dev)
package-lock.json (3,400 changed lines, generated)

@@ -38,14 +38,14 @@

      "@babel/cli": "7.20.7",
      "@babel/preset-env": "7.20.2",
      "@types/jest": "29.4.0",
      "canvas": "2.11.0",
      "eslint": "8.33.0",
      "eslint-config-airbnb": "19.0.4",
      "eslint-plugin-import": "2.27.5",
      "eslint-plugin-jest": "27.2.1",
      "eslint-plugin-jsx-a11y": "6.7.1",
      "husky": "8.0.3",
-     "jest": "29.4.1"
+     "jest": "29.4.1",
+     "pngjs": "^7.0.0"
    },
    "engines": {
      "node": ">=16.15.0",
@@ -0,0 +1,85 @@ (new file)

import fs from 'fs';
import { PNG } from 'pngjs';

import resizeImageWidth from '../resizeImageWidth';

const testImageBeforePath = './src/algorithms/image-processing/seam-carving/__tests__/test-image-before.png';
const testImageAfterPath = './src/algorithms/image-processing/seam-carving/__tests__/test-image-after.png';

/**
 * Compares two images and finds the number of different pixels.
 *
 * @param {ImageData} imgA - ImageData for the first image.
 * @param {ImageData} imgB - ImageData for the second image.
 * @param {number} threshold - Color difference threshold [0..255]. Smaller - stricter.
 * @returns {number} - Number of different pixels.
 */
function pixelsDiff(imgA, imgB, threshold = 0) {
  if (imgA.width !== imgB.width || imgA.height !== imgB.height) {
    throw new Error('Images must have the same size');
  }

  let differentPixels = 0;
  const numColorParams = 4; // RGBA

  for (let pixelIndex = 0; pixelIndex < imgA.data.length; pixelIndex += numColorParams) {
    // Get pixel's color for each image.
    const [aR, aG, aB] = imgA.data.subarray(pixelIndex, pixelIndex + numColorParams);
    const [bR, bG, bB] = imgB.data.subarray(pixelIndex, pixelIndex + numColorParams);

    // Get average pixel's color for each image (make them greyscale).
    const aAvgColor = Math.floor((aR + aG + aB) / 3);
    const bAvgColor = Math.floor((bR + bG + bB) / 3);

    // Compare pixel colors.
    if (Math.abs(aAvgColor - bAvgColor) > threshold) {
      differentPixels += 1;
    }
  }

  return differentPixels;
}

const pngLoad = (path) => new Promise((resolve) => {
  fs.createReadStream(path)
    .pipe(new PNG())
    .on('parsed', function Parsed() {
      /** @type {ImageData} */
      const imageData = {
        colorSpace: 'srgb',
        width: this.width,
        height: this.height,
        data: this.data,
      };
      resolve(imageData);
    });
});

describe('resizeImageWidth', () => {
  it('should perform content-aware image width reduction', async () => {
    const imgBefore = await pngLoad(testImageBeforePath);
    const imgAfter = await pngLoad(testImageAfterPath);

    const toWidth = Math.floor(imgBefore.width / 2);

    const {
      img: imgResized,
      size: resizedSize,
    } = resizeImageWidth({ img: imgBefore, toWidth });

    expect(imgResized).toBeDefined();
    expect(resizedSize).toBeDefined();

    expect(resizedSize).toEqual({ w: toWidth, h: imgBefore.height });
    expect(imgResized.width).toBe(imgAfter.width);
    expect(imgResized.height).toBe(imgAfter.height);

    const colorThreshold = 50;
    const differentPixels = pixelsDiff(imgResized, imgAfter, colorThreshold);

    // Allow 10% of pixels to be different
    const pixelsThreshold = Math.floor((imgAfter.width * imgAfter.height) / 10);

    expect(differentPixels).toBeLessThanOrEqual(pixelsThreshold);
  });
});
@ -1,91 +0,0 @@
|
||||
import { createCanvas, loadImage } from 'canvas';
|
||||
import resizeImageWidth from '../resizeImageWidth';
|
||||
|
||||
const testImageBeforePath = './src/algorithms/image-processing/seam-carving/__tests__/test-image-before.jpg';
|
||||
const testImageAfterPath = './src/algorithms/image-processing/seam-carving/__tests__/test-image-after.jpg';
|
||||
|
||||
/**
|
||||
* Compares two images and finds the number of different pixels.
|
||||
*
|
||||
* @param {ImageData} imgA - ImageData for the first image.
|
||||
* @param {ImageData} imgB - ImageData for the second image.
|
||||
* @param {number} threshold - Color difference threshold [0..255]. Smaller - stricter.
|
||||
* @returns {number} - Number of different pixels.
|
||||
*/
|
||||
function pixelsDiff(imgA, imgB, threshold = 0) {
|
||||
if (imgA.width !== imgB.width || imgA.height !== imgB.height) {
|
||||
throw new Error('Images must have the same size');
|
||||
}
|
||||
|
||||
let differentPixels = 0;
|
||||
const numColorParams = 4; // RGBA
|
||||
|
||||
for (let pixelIndex = 0; pixelIndex < imgA.data.length; pixelIndex += numColorParams) {
|
||||
// Get pixel's color for each image.
|
||||
const [aR, aG, aB] = imgA.data.subarray(pixelIndex, pixelIndex + numColorParams);
|
||||
const [bR, bG, bB] = imgB.data.subarray(pixelIndex, pixelIndex + numColorParams);
|
||||
|
||||
// Get average pixel's color for each image (make them greyscale).
|
||||
const aAvgColor = Math.floor((aR + aG + aB) / 3);
|
||||
const bAvgColor = Math.floor((bR + bG + bB) / 3);
|
||||
|
||||
// Compare pixel colors.
|
||||
if (Math.abs(aAvgColor - bAvgColor) > threshold) {
|
||||
differentPixels += 1;
|
||||
}
|
||||
}
|
||||
|
||||
return differentPixels;
|
||||
}
|
||||
|
||||
describe('resizeImageWidth', () => {
|
||||
it('should perform content-aware image width reduction', () => {
|
||||
// @see: https://jestjs.io/docs/asynchronous
|
||||
return Promise.all([
|
||||
loadImage(testImageBeforePath),
|
||||
loadImage(testImageAfterPath),
|
||||
]).then(([imgBefore, imgAfter]) => {
|
||||
// Original image.
|
||||
const canvasBefore = createCanvas(imgBefore.width, imgBefore.height);
|
||||
const ctxBefore = canvasBefore.getContext('2d');
|
||||
ctxBefore.drawImage(imgBefore, 0, 0, imgBefore.width, imgBefore.height);
|
||||
const imgDataBefore = ctxBefore.getImageData(0, 0, imgBefore.width, imgBefore.height);
|
||||
|
||||
// Resized image saved.
|
||||
const canvasAfter = createCanvas(imgAfter.width, imgAfter.height);
|
||||
const ctxAfter = canvasAfter.getContext('2d');
|
||||
ctxAfter.drawImage(imgAfter, 0, 0, imgAfter.width, imgAfter.height);
|
||||
const imgDataAfter = ctxAfter.getImageData(0, 0, imgAfter.width, imgAfter.height);
|
||||
|
||||
const toWidth = Math.floor(imgBefore.width / 2);
|
||||
|
||||
const {
|
||||
img: resizedImg,
|
||||
size: resizedSize,
|
||||
} = resizeImageWidth({ img: imgDataBefore, toWidth });
|
||||
|
||||
expect(resizedImg).toBeDefined();
|
||||
expect(resizedSize).toBeDefined();
|
||||
|
||||
// Resized image generated.
|
||||
const canvasTest = createCanvas(resizedSize.w, resizedSize.h);
|
||||
const ctxTest = canvasTest.getContext('2d');
|
||||
ctxTest.putImageData(resizedImg, 0, 0, 0, 0, resizedSize.w, resizedSize.h);
|
||||
const imgDataTest = ctxTest.getImageData(0, 0, resizedSize.w, resizedSize.h);
|
||||
|
||||
expect(resizedSize).toEqual({ w: toWidth, h: imgBefore.height });
|
||||
expect(imgDataTest.width).toBe(toWidth);
|
||||
expect(imgDataTest.height).toBe(imgBefore.height);
|
||||
expect(imgDataTest.width).toBe(imgAfter.width);
|
||||
expect(imgDataTest.height).toBe(imgAfter.height);
|
||||
|
||||
const colorThreshold = 50;
|
||||
const differentPixels = pixelsDiff(imgDataTest, imgDataAfter, colorThreshold);
|
||||
|
||||
// Allow 10% of pixels to be different
|
||||
const pixelsThreshold = Math.floor((imgAfter.width * imgAfter.height) / 10);
|
||||
|
||||
expect(differentPixels).toBeLessThanOrEqual(pixelsThreshold);
|
||||
});
|
||||
});
|
||||
});
|
Binary image files changed: 5.9 KiB → 6.6 KiB and 3.3 KiB → 12 KiB.
@@ -1,7 +1,7 @@

# Factorial

_Read this in other languages:_
-[_简体中文_](README.zh-CN.md), [français](README.fr-FR.md), [turkish](README.tr-TR.md), [ქართული](README.ka-GE.md).
+[_简体中文_](README.zh-CN.md), [_Français_](README.fr-FR.md), [_Türkçe_](README.tr-TR.md), [_ქართული_](README.ka-GE.md), [_Українська_](README.uk-UA.md).

In mathematics, the factorial of a non-negative integer `n`,
denoted by `n!`, is the product of all positive integers less
src/algorithms/math/factorial/README.uk-UA.md (33 lines, new file)

@@ -0,0 +1,33 @@

# Факторіал

_Прочитайте це іншими мовами:_
[_English_](README.md), [_简体中文_](README.zh-CN.md), [_Français_](README.fr-FR.md), [_Türkçe_](README.tr-TR.md), [_ქართული_](README.ka-GE.md).

У математиці факторіал невід'ємного цілого числа `n`, позначений `n!`, є добутком усіх натуральних чисел, менших або рівних `n`. Наприклад:

```
5! = 5 * 4 * 3 * 2 * 1 = 120
```

| n   | n!                |
| --- | ----------------: |
| 0   | 1                 |
| 1   | 1                 |
| 2   | 2                 |
| 3   | 6                 |
| 4   | 24                |
| 5   | 120               |
| 6   | 720               |
| 7   | 5 040             |
| 8   | 40 320            |
| 9   | 362 880           |
| 10  | 3 628 800         |
| 11  | 39 916 800        |
| 12  | 479 001 600       |
| 13  | 6 227 020 800     |
| 14  | 87 178 291 200    |
| 15  | 1 307 674 368 000 |

## Посилання

[Wikipedia](https://uk.wikipedia.org/wiki/%D0%A4%D0%B0%D0%BA%D1%82%D0%BE%D1%80%D1%96%D0%B0%D0%BB)
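For reference, a minimal iterative sketch of the definition above (the function name is illustrative; this is not the repository's own implementation, which lives under `src/algorithms/math/factorial`):

```javascript
/**
 * Computes n! iteratively, following the definition above.
 * @param {number} n - a non-negative integer
 * @return {number}
 */
function factorial(n) {
  let result = 1;
  for (let i = 2; i <= n; i += 1) {
    result *= i;
  }
  return result;
}

factorial(5); // => 120
```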
src/algorithms/sorting/bucket-sort/BucketSort.js (46 lines, new file)

@@ -0,0 +1,46 @@

import RadixSort from '../radix-sort/RadixSort';

/**
 * Bucket Sort
 *
 * @param {number[]} arr
 * @param {number} bucketsNum
 * @return {number[]}
 */
export default function BucketSort(arr, bucketsNum = 1) {
  const buckets = new Array(bucketsNum).fill(null).map(() => []);

  const minValue = Math.min(...arr);
  const maxValue = Math.max(...arr);

  const bucketSize = Math.ceil(Math.max(1, (maxValue - minValue) / bucketsNum));

  // Place elements into buckets.
  for (let i = 0; i < arr.length; i += 1) {
    const currValue = arr[i];
    const bucketIndex = Math.floor((currValue - minValue) / bucketSize);

    // Edge case for max value.
    if (bucketIndex === bucketsNum) {
      buckets[bucketsNum - 1].push(currValue);
    } else {
      buckets[bucketIndex].push(currValue);
    }
  }

  // Sort individual buckets.
  for (let i = 0; i < buckets.length; i += 1) {
    // Let's use the Radix Sorter here. This may give us
    // the average O(n + k) time complexity to sort one bucket
    // (where k is the number of digits in the longest number).
    buckets[i] = new RadixSort().sort(buckets[i]);
  }

  // Merge sorted buckets into final output.
  const sortedArr = [];
  for (let i = 0; i < buckets.length; i += 1) {
    sortedArr.push(...buckets[i]);
  }

  return sortedArr;
}
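A hedged usage sketch mirroring the calls exercised by the new test suite (the concrete input values and the relative import path are illustrative):

```javascript
import BucketSort from './BucketSort';

// Eight values scattered across four buckets, sorted per bucket, then gathered.
BucketSort([29, 25, 3, 49, 9, 37, 21, 43], 4); // => [3, 9, 21, 25, 29, 37, 43, 49]

// With the default bucketsNum of 1, every element lands in a single bucket,
// so the call degenerates to one RadixSort pass over the whole array.
BucketSort([5, 1, 4]); // => [1, 4, 5]
```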
src/algorithms/sorting/bucket-sort/README.md (35 lines, new file)

@@ -0,0 +1,35 @@

# Bucket Sort

**Bucket sort**, or **bin sort**, is a sorting algorithm that works by distributing the elements of an array into a number of buckets. Each bucket is then sorted individually, either using a different sorting algorithm, or by recursively applying the bucket sorting algorithm.

## Algorithm

Bucket sort works as follows:

1. Set up an array of initially empty `buckets`.
2. **Scatter:** Go over the original array, putting each object in its `bucket`.
3. Sort each non-empty `bucket`.
4. **Gather:** Visit the `buckets` in order and put all elements back into the original array.

Elements are distributed among bins:

![Elements are distributed among bins](./images/bucket_sort_1.png)

Then, elements are sorted within each bin:

![Elements are sorted within each bin](./images/bucket_sort_2.png)
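A minimal sketch of the four steps above, assuming numeric input; for brevity it sorts each bucket with the built-in comparator sort, whereas the `BucketSort.js` added in this pull request delegates to `RadixSort`:

```javascript
/**
 * Simplified bucket sort sketch (illustrative, not the repository implementation).
 * @param {number[]} arr
 * @param {number} bucketsNum
 * @return {number[]}
 */
function bucketSortSketch(arr, bucketsNum = 4) {
  if (arr.length === 0) return [];

  // 1. Set up an array of initially empty buckets.
  const buckets = Array.from({ length: bucketsNum }, () => []);

  const min = Math.min(...arr);
  const max = Math.max(...arr);
  const bucketSize = Math.max(1, Math.ceil((max - min + 1) / bucketsNum));

  // 2. Scatter: put each value into its bucket.
  arr.forEach((value) => {
    const index = Math.min(Math.floor((value - min) / bucketSize), bucketsNum - 1);
    buckets[index].push(value);
  });

  // 3. Sort each non-empty bucket, then
  // 4. Gather: concatenate the buckets back in order.
  return buckets.flatMap((bucket) => bucket.sort((a, b) => a - b));
}
```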
## Complexity

The computational complexity depends on the algorithm used to sort each bucket, the number of buckets used, and whether the input is uniformly distributed.

The **worst-case** time complexity of bucket sort is `O(n^2)` when the per-bucket sorting algorithm is *insertion sort*, which is the most common choice since buckets are expected to hold few elements relative to the whole list. The worst case occurs when all elements land in a single bucket, reducing the running time to the worst case of insertion sort (all elements in reverse order). If the intermediate sort has a worst-case running time of `O(n * log(n))`, then the worst-case running time of bucket sort is also `O(n * log(n))`.

On **average**, when the distribution of elements across buckets is reasonably uniform, bucket sort can be shown to run in `O(n + k)` time for `k` buckets.

## References

- [Bucket Sort on Wikipedia](https://en.wikipedia.org/wiki/Bucket_sort)
@@ -0,0 +1,33 @@ (new file)

import BucketSort from '../BucketSort';
import {
  equalArr,
  notSortedArr,
  reverseArr,
  sortedArr,
} from '../../SortTester';

describe('BucketSort', () => {
  it('should sort the array of numbers with different buckets amounts', () => {
    expect(BucketSort(notSortedArr, 4)).toEqual(sortedArr);
    expect(BucketSort(equalArr, 4)).toEqual(equalArr);
    expect(BucketSort(reverseArr, 4)).toEqual(sortedArr);
    expect(BucketSort(sortedArr, 4)).toEqual(sortedArr);

    expect(BucketSort(notSortedArr, 10)).toEqual(sortedArr);
    expect(BucketSort(equalArr, 10)).toEqual(equalArr);
    expect(BucketSort(reverseArr, 10)).toEqual(sortedArr);
    expect(BucketSort(sortedArr, 10)).toEqual(sortedArr);

    expect(BucketSort(notSortedArr, 50)).toEqual(sortedArr);
    expect(BucketSort(equalArr, 50)).toEqual(equalArr);
    expect(BucketSort(reverseArr, 50)).toEqual(sortedArr);
    expect(BucketSort(sortedArr, 50)).toEqual(sortedArr);
  });

  it('should sort the array of numbers with the default buckets of 1', () => {
    expect(BucketSort(notSortedArr)).toEqual(sortedArr);
    expect(BucketSort(equalArr)).toEqual(equalArr);
    expect(BucketSort(reverseArr)).toEqual(sortedArr);
    expect(BucketSort(sortedArr)).toEqual(sortedArr);
  });
});
src/algorithms/sorting/bucket-sort/images/bucket_sort_1.png (new binary file, 11 KiB)
src/algorithms/sorting/bucket-sort/images/bucket_sort_2.png (new binary file, 13 KiB)
@@ -33,7 +33,7 @@ store them in memory, which gives at best a time complexity `O(n log n)`. That

would seem to make radix sort at most equally efficient as the best
comparison-based sorts (and worse if keys are much longer than `log n`).

-![Radix Sort](https://www.researchgate.net/publication/291086231/figure/fig1/AS:614214452404240@1523451545568/Simplistic-illustration-of-the-steps-performed-in-a-radix-sort-In-this-example-the.png)
+![Radix Sort](./images/radix-sort.png)

## Complexity
src/algorithms/sorting/radix-sort/images/radix-sort.png (new binary file, 12 KiB)
@@ -7,7 +7,7 @@ _Read this in other languages:_

[_Português_](README.pt-BR.md),
[_한국어_](README.ko-KR.md),
[_Español_](README.es-ES.md),
-[_Turkish_](README.tr-TR.md),
+[_Türkçe_](README.tr-TR.md),
+[_Українська_](README.uk-UA.md)

In computer science, a **linked list** is a linear collection