Compare commits
314 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 20927a97eb | |||
| ebe75b0fea | |||
| a6be24c59d | |||
| afdd27648e | |||
| 7e475b5472 | |||
| e65fde736b | |||
| bf0b4172de | |||
| 961b47a58c | |||
| 330be87338 | |||
| 4b30ce12c9 | |||
| 0be7dba5d3 | |||
|
|
6eaa992f5f | ||
| 9b23d713ff | |||
| 85d230c561 | |||
| 6afed5e174 | |||
| 640a1a8161 | |||
| 274d37c4d6 | |||
| a89c812c06 | |||
| 6af0656216 | |||
| e76db94136 | |||
| 7011101b05 | |||
| d0599b99ec | |||
| d7c5b9ad7d | |||
| d2c81e7779 | |||
| ef5fdbd377 | |||
| 5e67e45910 | |||
| 63ef68fb95 | |||
| 0f9751bf70 | |||
| bcc2ffdc13 | |||
| 2da4588430 | |||
| ed9ce60af4 | |||
| 4dd3a4a6e8 | |||
| efd29bbe7e | |||
| 96458bd8b6 | |||
| 477f3c9f40 | |||
| 3f6e7fbb88 | |||
| 5650c6e69b | |||
| 61df32b7b2 | |||
| 859d00da16 | |||
| 9df95c0fc3 | |||
| d6d2c0a396 | |||
| 127eb71968 | |||
| 6005d0fd39 | |||
| 290e626f07 | |||
| e30d011273 | |||
| 4c52636c23 | |||
| aa580c1772 | |||
| 8f7d49b280 | |||
| 342b0cece9 | |||
| 3ce52668df | |||
| ce9343348e | |||
| 8d9f7a6761 | |||
| 1b1ec4b87a | |||
| cc92ef953a | |||
| 8d8c30c9a7 | |||
| d44ad2d42e | |||
| 2ed090e989 | |||
| dafb162797 | |||
| 57280b8285 | |||
| c9fc963670 | |||
| cc87cae145 | |||
| f53d61a5bc | |||
| b29874189c | |||
| 6722f399db | |||
| 8e39acef45 | |||
| b065ebe9ba | |||
| 25a7cd060a | |||
| 21f330260e | |||
| 99e9afe7b1 | |||
| 5118ae8b1f | |||
| 03b23f0e3c | |||
| ca33c9eb9e | |||
| b4374fdd4d | |||
| 455ee64a88 | |||
| 893b371017 | |||
| 5fa9baae7a | |||
| 70d565c785 | |||
| 5c27e332cd | |||
| f2d919bae5 | |||
| 7d4c77332b | |||
| d1186d4f82 | |||
| 3b8a02ac51 | |||
| f356d4ab57 | |||
| b07bc85114 | |||
| c37746cd84 | |||
| fb7c417394 | |||
| ddc58ee221 | |||
| 8b5e5a75c1 | |||
| c1862e6b05 | |||
| c2c2780967 | |||
| 086d933a0d | |||
| 1e7da394a1 | |||
| 9eb220194f | |||
| 0baf141e4b | |||
| 569d4f5c86 | |||
| 36f24731b0 | |||
| ea5ace0166 | |||
| 663809ff27 | |||
| 5d6adb5446 | |||
| 212ad1ace2 | |||
| 707827929a | |||
| 27ad01657d | |||
| e7da7acc08 | |||
| 72943cb22c | |||
| 068cc4a4f7 | |||
| 31b34f5de1 | |||
| 4bded1af80 | |||
| 100c7fe260 | |||
| 04f628fcc1 | |||
| 5fa26bfbf3 | |||
| 4d3eaaf604 | |||
| 33ab7c48bf | |||
| 1f4784c75e | |||
| ae5a4ea818 | |||
| 5d66f7b453 | |||
| 12af656150 | |||
| 603ff3df0f | |||
| b712981718 | |||
| 863d1a1027 | |||
| 6287da8efe | |||
| c27e852ccb | |||
| 0157765628 | |||
| 8689037ceb | |||
| 03a8914986 | |||
| 26b9d6706d | |||
| a21d4a9eb6 | |||
| 212d614ebd | |||
| bcba3ccde0 | |||
| b42ad76372 | |||
| f95565771a | |||
| 3d6cbf5ac1 | |||
| fc5d43e14b | |||
| b3264748cd | |||
| ede40f06d0 | |||
| e69c675f82 | |||
| 17c1c90957 | |||
| c6f2f71f6c | |||
| d2c7c151bc | |||
| 4448eca787 | |||
| 0350a77dff | |||
| 4f98c599df | |||
| 103b2407d2 | |||
| 059da9be12 | |||
| 3cac3a997e | |||
| 3ffb001c73 | |||
| 2810a6f952 | |||
| 166362334b | |||
| d3f9103ebf | |||
| 1c5507d4a5 | |||
| 5e3cd9f484 | |||
| f27b884b6a | |||
| 769a9d8ea5 | |||
| 9499a7f0e0 | |||
| ca38583f96 | |||
| 9557a0cfb5 | |||
| c61e8bb7fc | |||
|
|
4fbcaa47f8 | ||
| 6f31fc5abb | |||
|
|
10ec0e35d0 | ||
| 29c4d253cf | |||
| 55c870d392 | |||
|
|
cda6ec066d | ||
| c8b89e6847 | |||
| b46a739008 | |||
| af62394651 | |||
|
|
bc75a5cd97 | ||
| 7d8af3af08 | |||
|
|
ef8197b275 | ||
|
|
58d9ac7fdd | ||
|
|
86a893916d | ||
|
|
e7a6292fe9 | ||
|
|
81209b0cf8 | ||
|
|
77b8147f9b | ||
|
|
32abd7e14e | ||
|
|
8a582f0b08 | ||
|
|
01be6f7d43 | ||
|
|
9011700879 | ||
|
|
12cd4e5e5f | ||
|
|
10201bcadb | ||
|
|
3b4fcc0069 | ||
|
|
31a1ef74fb | ||
|
|
9be3a1475f | ||
|
|
7591f396fd | ||
| 517b84042f | |||
| dbfae33074 | |||
| ead34009c6 | |||
| 7a4a0f05d1 | |||
| 8f32b2d887 | |||
| e6c9600b40 | |||
|
|
d5f2acb9ab | ||
| 3274e96355 | |||
| 78dfa286b0 | |||
| f32624d131 | |||
| 59a417056c | |||
| 18f6704ed7 | |||
| caf2f22e9f | |||
| 865966db55 | |||
| 6266a6293c | |||
| 70ea60aacf | |||
| 599cc44fff | |||
| e1df59d512 | |||
| eaa45976c7 | |||
| 362838c434 | |||
| 52caa9cdd2 | |||
| c6afdb8762 | |||
| c4964806f0 | |||
| a2506b927a | |||
| 53061d9f78 | |||
| 2555dc7fc2 | |||
| 058087e15a | |||
| c20c6393e5 | |||
| a25ec85ba2 | |||
| c930bda7a6 | |||
| e7bc242292 | |||
| b90a9ec7d8 | |||
| d18a35d9b7 | |||
| ffc34a131f | |||
| a38e7b069a | |||
| 9c15980e68 | |||
| a05689017e | |||
| 54c9660145 | |||
| 0c4ef7f6c4 | |||
| 61d7a88c82 | |||
| dff173222d | |||
| 6eded1f4be | |||
| 07b715dd4c | |||
| 1f1aa995df | |||
| 1e2703acf2 | |||
| 07abba6312 | |||
| 3f872463b6 | |||
| 531443531d | |||
| 49c5e9eadd | |||
| 3c5769e0e2 | |||
| 0e061921e9 | |||
| edca820634 | |||
| bc7afad793 | |||
| 86dbed4bf9 | |||
| 2f31d7b641 | |||
| ef0b9d23c3 | |||
| 6e30a54ead | |||
| fa3cf743f9 | |||
| 5b67e85009 | |||
| 59c4d1aa90 | |||
| abc1c075f5 | |||
| 4aac66b6fb | |||
| 17f0dddf20 | |||
| c5720e591c | |||
| 796cd7c007 | |||
| c9cd62f5d7 | |||
| fc1a738625 | |||
| 854678fd3c | |||
| 3fa75ba18e | |||
| 212f532350 | |||
| 29f2b8e305 | |||
| e30ca4103f | |||
| ec817fb9c2 | |||
| 88918dfb35 | |||
| 0f072ddd59 | |||
| a1b422dbe8 | |||
| 3af5ca49cc | |||
| 117d0f36e6 | |||
| aa1e7da38c | |||
| ea83f5017e | |||
| efc3745690 | |||
| 6ae4a8f582 | |||
| b46456dd32 | |||
| 4ae353a0bd | |||
| 45d118a77a | |||
| 73916f4fd9 | |||
| 4a3cc66401 | |||
| 0ed1208d59 | |||
| b99abaafec | |||
| ee02997881 | |||
| 626ab29fc7 | |||
| e6c4d43f6e | |||
| 880a9169e5 | |||
| 4c68c925d3 | |||
| 9d17dc17fd | |||
| 84bc785cdc | |||
| b784041894 | |||
| 2b408fb2b2 | |||
| 8618c10d01 | |||
| dff5e534ff | |||
| e95e0a3e8f | |||
| 3b27604258 | |||
| c0fef15e64 | |||
| 3c0f4acc05 | |||
| 406a57bb9d | |||
| b50307c88d | |||
| a28149ac8d | |||
| b55b049508 | |||
| ce8f49cd1f | |||
| 357b40e670 | |||
| a669f69d99 | |||
| f4c25c683f | |||
| 9f4bede08e | |||
| 3676849efc | |||
| 5ea1e3c358 | |||
| 500514fb20 | |||
| fd32f404c0 | |||
| d1453fbc95 | |||
| 8fee81bd19 | |||
| 7fd54cd094 | |||
| a44e091851 | |||
| ed193d9fc0 | |||
| ac4dab9d7e | |||
| cbb5dd0ee2 | |||
| 57baeb261e | |||
| bbcb290560 | |||
| 17886dde62 | |||
| 998272c8d5 | |||
| 13b2c727fc | |||
| 7fcfecf308 | |||
| a319e0136a |
@ -38,7 +38,7 @@
|
||||
<attribute name="gradle_used_by_scope" value="builder"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8/"/>
|
||||
<classpathentry kind="con" path="org.eclipse.buildship.core.gradleclasspathcontainer"/>
|
||||
<classpathentry kind="output" path="bin/default"/>
|
||||
</classpath>
|
||||
|
||||
33
.github/workflows/build_action.yml
vendored
Normal file
33
.github/workflows/build_action.yml
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
name: Latest Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
jdk: [8, 11, 16, 17, 18, 19, 20]
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Set up JDK ${{ matrix.jdk }}
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
distribution: 'temurin'
|
||||
java-version: ${{ matrix.jdk }}
|
||||
|
||||
- name: Validate Gradle wrapper
|
||||
uses: gradle/wrapper-validation-action@v1
|
||||
|
||||
- name: Make gradlew executable
|
||||
run: chmod +x ./gradlew
|
||||
|
||||
- name: Build with Gradle
|
||||
run: ./gradlew build
|
||||
84
.github/workflows/build_tests_action.yml
vendored
Normal file
84
.github/workflows/build_tests_action.yml
vendored
Normal file
@ -0,0 +1,84 @@
|
||||
name: Unit Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Unit Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up JDK 11
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
distribution: temurin
|
||||
java-version: 11
|
||||
cache: gradle
|
||||
|
||||
- name: Make gradlew executable
|
||||
run: chmod +x ./gradlew
|
||||
- name: Build and Test
|
||||
run: |
|
||||
./gradlew generateTestSource test jacocoTestReport --info -Dfull_test_suite=true
|
||||
./gradlew --stop
|
||||
|
||||
- name: Publish Test Result
|
||||
uses: EnricoMi/publish-unit-test-result-action@v2
|
||||
id: test-results
|
||||
if: always()
|
||||
with:
|
||||
junit_files: build/test-results/**/*.xml
|
||||
fail_on: nothing
|
||||
ignore_runs: true
|
||||
json_thousands_separator: .
|
||||
time_unit: milliseconds
|
||||
|
||||
- name: Create Badge Color
|
||||
shell: bash
|
||||
run: |
|
||||
case ${{ fromJSON( steps.test-results.outputs.json ).conclusion }} in
|
||||
success)
|
||||
echo "BADGE_COLOR=31c653" >> $GITHUB_ENV
|
||||
;;
|
||||
failure)
|
||||
echo "BADGE_COLOR=800000" >> $GITHUB_ENV
|
||||
;;
|
||||
neutral)
|
||||
echo "BADGE_COLOR=696969" >> $GITHUB_ENV
|
||||
;;
|
||||
esac
|
||||
|
||||
- name: Create Test Badge
|
||||
uses: emibcn/badge-action@v1.2.4
|
||||
with:
|
||||
label: Tests
|
||||
status: '${{ fromJSON( steps.test-results.outputs.json ).conclusion }}, Passed: ${{ fromJSON( steps.test-results.outputs.json ).formatted.stats.tests_succ }}, Skipped: ${{ fromJSON( steps.test-results.outputs.json ).formatted.stats.tests_skip }}, Failed: ${{ fromJSON( steps.test-results.outputs.json ).formatted.stats.tests_fail }}'
|
||||
color: ${{ env.BADGE_COLOR }}
|
||||
path: tests.svg
|
||||
|
||||
- name: Create Coverage Badge
|
||||
id: jacoco
|
||||
uses: cicirello/jacoco-badge-generator@v2
|
||||
with:
|
||||
jacoco-csv-file: build/reports/jacoco/test/jacocoTestReport.csv
|
||||
badges-directory: null
|
||||
intervals: 95 80 70 60 50 0
|
||||
|
||||
- name: Upload Test Badge
|
||||
uses: exuanbo/actions-deploy-gist@v1
|
||||
with:
|
||||
token: ${{ secrets.GIST_TOKEN }}
|
||||
gist_id: 280257cd19cbe1dda3789bebd4ff65cf
|
||||
file_path: tests.svg
|
||||
|
||||
- name: Upload Coverage Badge
|
||||
uses: exuanbo/actions-deploy-gist@v1
|
||||
with:
|
||||
token: ${{ secrets.GIST_TOKEN }}
|
||||
gist_id: 280257cd19cbe1dda3789bebd4ff65cf
|
||||
file_path: jacoco.svg
|
||||
27
.github/workflows/coverage.yml
vendored
Normal file
27
.github/workflows/coverage.yml
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
name: Measure coverage
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ debug ]
|
||||
jobs:
|
||||
build:
|
||||
name: Code Quality Check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
|
||||
- name: Set up JDK 11
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
distribution: 'temurin'
|
||||
java-version: 11
|
||||
cache: 'gradle'
|
||||
- name: Make gradlew executable
|
||||
run: chmod +x ./gradlew
|
||||
- name: Build and analyze
|
||||
run: ./gradlew generateTestSource test jacocoTestReport --info -Dfull_test_suite=true
|
||||
- name: Upload to CodeCov
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
run: bash <(curl -s https://codecov.io/bash) -t $CODECOV_TOKEN
|
||||
39
.github/workflows/pull_build_tests_action.yml
vendored
Normal file
39
.github/workflows/pull_build_tests_action.yml
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
name: Unit Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Unit Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up JDK 11
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
distribution: temurin
|
||||
java-version: 11
|
||||
cache: gradle
|
||||
|
||||
- name: Make gradlew executable
|
||||
run: chmod +x ./gradlew
|
||||
- name: Build and Test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: ./gradlew generateTestSource test jacocoTestReport --info -Dfull_test_suite=true
|
||||
|
||||
- name: Publish Test Result
|
||||
uses: EnricoMi/publish-unit-test-result-action@v2
|
||||
id: test-results
|
||||
if: always()
|
||||
with:
|
||||
junit_files: build/test-results/**/*.xml
|
||||
fail_on: nothing
|
||||
ignore_runs: true
|
||||
json_thousands_separator: .
|
||||
time_unit: milliseconds
|
||||
20
.gitignore
vendored
20
.gitignore
vendored
@ -5,9 +5,6 @@
|
||||
# Ignore Gradle GUI config
|
||||
gradle-app.setting
|
||||
|
||||
# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
|
||||
!gradle-wrapper.jar
|
||||
|
||||
# Cache of project
|
||||
.gradletasknamecache
|
||||
|
||||
@ -16,7 +13,6 @@ gradle-app.setting
|
||||
|
||||
.classpath
|
||||
.project
|
||||
gradle-wrapper.jar
|
||||
|
||||
---> Custom
|
||||
!/libs/
|
||||
@ -34,5 +30,19 @@ gradle-wrapper.jar
|
||||
/src/main/java/speiger/src/collections/doubles/*
|
||||
/src/main/java/speiger/src/collections/objects/*
|
||||
|
||||
#Generated Tests
|
||||
/src/test/java/speiger/src/testers/booleans/*
|
||||
/src/test/java/speiger/src/testers/bytes/*
|
||||
/src/test/java/speiger/src/testers/shorts/*
|
||||
/src/test/java/speiger/src/testers/chars/*
|
||||
/src/test/java/speiger/src/testers/ints/*
|
||||
/src/test/java/speiger/src/testers/longs/*
|
||||
/src/test/java/speiger/src/testers/floats/*
|
||||
/src/test/java/speiger/src/testers/doubles/*
|
||||
/src/test/java/speiger/src/testers/objects/*
|
||||
/src/test/java/speiger/src/tests/*
|
||||
|
||||
#Cache result
|
||||
/src/builder/resources/speiger/assets/collections/cache.bin
|
||||
/src/builder/resources/speiger/assets/collections/cache.bin
|
||||
/src/builder/resources/speiger/assets/testers/cache.bin
|
||||
/src/builder/resources/speiger/assets/tests/cache.bin
|
||||
|
||||
296
Changelog.md
Normal file
296
Changelog.md
Normal file
@ -0,0 +1,296 @@
|
||||
# Changelog of versions
|
||||
|
||||
### Version 0.9.0
|
||||
- Added: getFirst/getLast/removeFirst/removeLast to List.class.
|
||||
- Added: Dedicated Set toArray implementations.
|
||||
- Added: ToArray/pushTop functions to Stack.class.
|
||||
- Added: ComputeNonDefault functions which will contain the current behavior of the Compute function, while the Compute will be changed to be more java compliant!
|
||||
- Added: List.reversed, which returns a SubList that has all elements in reversed order and also inserts reversed.
|
||||
- Added: Iterators.infinite as an option that will create an Infinite Iterator based on the inputted one.
|
||||
- Added: List.indexedIterator which allows you to create an iterator with customized iteration indices. Useful if you want to transform lists output.
|
||||
- Added: PriorityQueue.contains is now a function
|
||||
- Added: Iterators/Async Builders now support MapToPrimitiveType function on the object variant. So more processing can be done. (Will be expanded upon later versions)
|
||||
- Fixed: SetValue wasn't working on forEach implementations.
|
||||
- Fixed: Compute functions now perform with primitives more java compliant. Meaning that getDefaultReturnValue function no longer is seen as null.
|
||||
- Fixed: Supplier was using the wrong dataType in their function name.
|
||||
- Updated: SimpleCodeGenerator 1.3.0 is now being used which allows for iterative code support.
|
||||
- Breaking Change: Map.compute/IfAbsent/Present and Map.supplyIfAbsent if the value is a primitive it will no longer accept the defaultReturnValue() as "null" if that functionality is desired then use: computeNonDefault which contains the functionality!
|
||||
|
||||
### Version 0.8.1
|
||||
- Added: getFirst/getLast/removeFirst/removeLast to List.class.
|
||||
- Added: Dedicated Set toArray implementations.
|
||||
- Added: ToArray/pushTop functions to Stack.class.
|
||||
- Added: ComputeNonDefault functions which will contain the current behavior of the Compute function, while the Compute will be changed to be more java compliant!
|
||||
- Added: List.reversed, which returns a SubList that has all elements in reversed order and also inserts reversed.
|
||||
- Added: Iterators.infinite as an option that will create an Infinite Iterator based on the inputted one.
|
||||
- Added: List.indexedIterator which allows you to create an iterator with customized iteration indices. Useful if you want to transform lists output.
|
||||
- Added: PriorityQueue.contains is now a function
|
||||
- Added: Iterators/Async Builders now support MapToPrimitiveType function on the object variant. So more processing can be done. (Will be expanded upon later versions)
|
||||
- Fixed: SetValue wasn't working on forEach implementations.
|
||||
- Fixed: Compute functions now perform with primitives more java compliant. Meaning that getDefaultReturnValue function no longer is seen as null.
|
||||
- Fixed: Supplier was using the wrong dataType in their function name.
|
||||
- Updated: SimpleCodeGenerator 1.3.0 is now being used which allows for iterative code support.
|
||||
|
||||
### Version 0.8.0
|
||||
- Added: getFirst/getLast/removeFirst/removeLast to Lists
|
||||
- Added: Dedicated implementations for toArray into TreeSets
|
||||
- Fixed: forEach methods in Maps now can use "setValue" functions.
|
||||
|
||||
### Version 0.8.0
|
||||
- Added: ISizeProvider interface (Optimization Helper)
|
||||
- Added: ISizeProvider into most Iterable implementations (Distinct/Filter/FlatMap/ArrayFlatMap don't support it, for obvious reasons)
|
||||
- Added: ToArray function into Iterable which uses ISizeProvider to reduce overhead of duplicating arrays.
|
||||
- Added: Functions that have the same type, Int2IntFunction as example, have now a identity function.
|
||||
- Added: Functions of a BooleanValue have now alwaysTrue/False function.
|
||||
- Added: ForEachIndexed for all Iterable implementations
|
||||
- Added: RandomGenerator support (Java17), though requires self compilation
|
||||
- Added: Optimizations for HashUtils next power of function.
|
||||
- Added: toArray() now returns a cached empty array if the collection is empty.
|
||||
- Added: toArray function for AsyncBuilder
|
||||
- Added: Modularization to the library where feature can be disabled as needed. (Requires Self-Compilation)
|
||||
- Fixed: putIfAbsent now replaces defaultValues
|
||||
- Fixed: OpenHashSet/Map and their Custom Variants no longer rely on List implementations.
|
||||
- Fixed: ObjectCopyOnWriteList.of did create a ObjectArrayList instead of the CopyOnWrite variant.
|
||||
- Removed: BooleanSet and Maps that start with a Boolean classes since they can not be used anyways.
|
||||
- Breaking Change: Function classes now use the "apply/applyAs/test" format from Java itself, instead of the "get" format. This cleans up a lot of things. But will break existing function class implementations
|
||||
- Breaking Change: Classes that used PrimitiveCollection functions now default to java functions where applicable, this is to increase compat.
|
||||
- Breaking Change: Some function classes now get closer to javas terms. (Predicate/UnaryOperator etc)
|
||||
|
||||
### Version 0.7.0
|
||||
- Added: Over 11 Million Unit Tests to this library to ensure quality.
|
||||
- Added: ArrayList size constructor now throws IllegalStateException if the size parameter is negative
|
||||
- Added: EnumMap specialized forEach implementation.
|
||||
- Added: AbstractMap.remove now delegates to its primitive counterpart.
|
||||
- Added: ConcurrentHashMap now implements ITrimmable
|
||||
- Refactor: Removed a lot of disabled code from ArraySet.
|
||||
- Removed: LinkedList.addAll(index, List) now delegates to LinkedList.addAll(index, Collection) due to no special optimization required.
|
||||
- Fixed: AbstractList.SubList.get/set/swapRemove didn't calculate their List index Properly
|
||||
- Fixed: AbstractList.SubList chains now properly if you create SubLists within SubLists.
|
||||
- Fixed: AbstractList.Iterator.add now respects Immutable/UnmodifiableLists.
|
||||
- Fixed: AbstractList.Iterator.skip/back now keep track of the last returned value for remove function to work properly.
|
||||
- Fixed: CopyOnWriteArrayList.extract/removeElements(int, int) does now proper range checks and remove elements properly.
|
||||
- Fixed: CopyOnWriteArrayList.SubList now works properly. (Reimplemented entirely)
|
||||
- Fixed: CopyOnWriteArrayList.Iterator.previous() was returning the wrong values.
|
||||
- Fixed: CopyOnWriteArrayList.Iterator.skip now skips the right amount of elements and stops where it should.
|
||||
- Fixed: LinkedList.first/last/dequeue/dequeueLast now throws NoSuchElementException when empty instead of IllegalStateException.
|
||||
- Fixed: LinkedList had an edge case where the entire reverse iterator would break if the wrong element was removed.
|
||||
- Fixed: LinkedList.extractElement now returns the correct values.
|
||||
- Fixed: AbstractMap.entrySet().remove(Object) now returns true if defaultReturnValue elements were removed.
|
||||
- Fixed: ConcurrentHashMap.remove(Object, Object) checks if the type matches before comparing against null Values.
|
||||
- Fixed: LinkedHashMap.clearAndTrim() was checking the wrong value for determining the full reset or clearing of a Map.
|
||||
- Fixed: HashMap.trim/clearToTrim() was using the wrong value to determine if something should be done.
|
||||
- Fixed: HashMap now compares empty values (0) against nullKeys when Object Variants of the type are used.
|
||||
- Fixed: ImmutableMap now compares empty values (0) against nullKeys when Object Variants of the type are used.
|
||||
- Fixed: ArrayMap.iterator(key) now throws NoSuchElementException when the element wasn't found.
|
||||
- Fixed: Linked/EnumMap array constructor was creating the wrong size values array.
|
||||
- Fixed: LinkedEnumMap.getAndMoveToFirst/Last was moving elements even if the element wasn't present.
|
||||
- Fixed: AVL/RBTreeMap.getFirst/LastKey was not throwing a NoSuchElementException if the map was empty.
|
||||
- Fixed: Map.Builder wasn't throwing a IllegalStateException when creating a negative size builder.
|
||||
- Fixed: AVL/RBTreeSet.DecendingSet.subSet(from, fromInclusive, to, toInclusive) was creating a corrupt ascending subset.
|
||||
- Fixed: ArraySet throws now a IllegalStateException when trying to create it with a negative size.
|
||||
- Fixed: ArraySet.addMoveToLast(key) was crashing when a key was already present.
|
||||
- Fixed: Immutable/LinkedHashSet now keep track of their iteration index properly.
|
||||
- Fixed: LinkedHashSet.moveToFirst/Last(key) would crash if the Set was empty.
|
||||
- Fixed: LinkedHashSet.clearAndTrim() was checking the wrong value for determining the full reset or clearing of a Map.
|
||||
- Fixed: HashSet.trim/clearToTrim() was using the wrong value to determine if something should be done.
|
||||
|
||||
|
||||
### Version 0.6.2
|
||||
- Added: Array only sorting function return the inputted array. This was done to allow for static final references to use the method in one go without having to make lambda wrappers. Cleaner code.
|
||||
- Added: Iterator Wrappers are now a bit more in Compliance with Java Standards.
|
||||
- Added: AsyncBuilders now Support Array Inputs to create cleaner code.
|
||||
- Changed: LinkedList.addBulk variable definition was triggering a false positive.
|
||||
- Fixed: TreeMap.subMap().entrySet().remove() wouldn't check primitives properly.
|
||||
- Fixed: SortedMap.sub/tail/headMap were looping into themselves.
|
||||
- Fixed: AbstractCollection.retainAll didn't push removed values through the consumer.
|
||||
- Fixed: AbstractCollection.toArray wouldn't reset the last entry if the input array was larger than the elements in the collection.
|
||||
- Fixed: SubList didn't check for ranges properly or didn't use parent list to validate changes.
|
||||
- Fixed: ArrayList.addElements didn't check input array fully and used the wrong variable to move the elements around.
|
||||
- Fixed: LinkedList.addElements(EmptyInput) would crash.
|
||||
- Fixed: LinkedList.swapRemove didn't account for if the removed element was the prelast one.
|
||||
- Fixed: LinkedList.removeElements would break the implementation if the list was almost empty and the middle element was removed.
|
||||
- Fixed: LinkedHashSet.addAndMoveToFirst wouldn't move elements to the first place.
|
||||
- Fixed: ArrayList/LinkedList extractElements crashing when 0 or less elements are desired.
|
||||
- Fixed: TreeMap pollFirst/LastKey should return the defaultMin/max value instead of a Empty value.
|
||||
- Fixed: TreeMap keySet implementation was missing the class type implementations to pass keySet tests.
|
||||
- Fixed: TreeMap.SubMap Iterator (primitive Keys) was crashing because a Null was set on to a primitive.
|
||||
|
||||
|
||||
### Version 0.6.1
|
||||
- Fixed: FIFO queue crashing when the last index is before the first index when peek is called.
|
||||
- Fixed: FIFO queue only clears the array if it was in use.
|
||||
- Added: Sorted Method for the stream replacing functions.
|
||||
|
||||
### Version 0.6.0
|
||||
- Added: addOrGet for sets.
|
||||
- Added: Async API which allows to easily execute Iterables/Collections offthread without the complexity.
|
||||
- Added: CopyOnWriteArrayList and tests for it
|
||||
- Added: Support up to Java17.
|
||||
- Added: Build System now adds module-info if the Running JVM is 9 or higher
|
||||
- Added: ArrayList.of(Class, size) that allows you to allocate a size right at the creation of the List without having to create a wrapper array.
|
||||
- Added: A ConcurrentHashMap implementation.
|
||||
- Fixed: containsValue in the HashMap wouldn't check the nullKey
|
||||
- Removed: Deprecated functions from SortedMaps/Sets
|
||||
|
||||
### Version 0.5.3
|
||||
- Added: OrderedMap/Set
|
||||
- Added: Deprecation to Functions that are specific to Ordered interfaces in the SortedMap/Set
|
||||
- Added: subFrom to Maps which is the counterpart of the addTo method
|
||||
- Added: pourAsList and pourAsSet (booleans excluded for sets) to Iterable
|
||||
- Fixed: ArrayList.grow had a small bug where it would trigger too early causing performance problems with exact sized collections.
|
||||
- Fixed: FIFOQueue size constructor had a small bug where it would trigger a array enlargement when all elements were inserted.
|
||||
|
||||
### Version 0.5.2
|
||||
- Fixed: Bugs with Queues starting with the wrong size
|
||||
- Fixed: ArrayGrowth for Queues was +1 instead of +50%
|
||||
- Added: Benchmarks with java and FastUtil
|
||||
|
||||
### Version 0.5.1
|
||||
- Fixed: Reworked the NavigableSet/Map implementations of RBTree/AVLTree/Array Sets/Maps so they are now deemed stable.
|
||||
- Added: Another 150k Unit tests.
|
||||
- Added: List and Set Unit tests for Integer (or Primitives in this case) to ensure basic stability there. (Now covering all sets and lists)
|
||||
- Fixed: Bugs with null values for primitive collections.
|
||||
- Removed: ArraySet/Map subSet/subMap implementation was removed.
|
||||
|
||||
### Version 0.5.0
|
||||
- Added: 2 Helper functions to find out how many bits are required to store a Number.
|
||||
- Added: pour function directly into Iterable which allows to collect all elements in the Iterable directly.
|
||||
- Added: The new ToArray method from Java9 and newer into the library. Using a functional interface. (Just a backport)
|
||||
- Changed: Reworked how the Map Builder functions are created. They are now in a SubClass that moves them out of the way. Less Clutter. (This might break things if that was used before)
|
||||
- Added: Map Builder that allows now to Build Maps like Guava ImmutableMaps can be build. Note: This has a slight performance overhead.
|
||||
- Added: Unmodifiable and Synchronize wrapper functions directly into Collection Interfaces. This is mostly a quality of life thing.
|
||||
- Added: Unmodifiable and Synchronized Wrapper Collections can now be cloned. They clone the underlying map which doesn't break functionality. (Had a usecase for it)
|
||||
- Added: A boxed putAll array variant.
|
||||
- Fixed: EnumMaps didn't keep track of their size and now got proper care and implementations as needed. There might be more work required but at least the core functionality is now up to date.
|
||||
- Added: Tests for the new Stream replace functions to ensure no bugs are left.
|
||||
- Fixed: Custom HashSet reduce function with a default value was checking incorrectly for present keys.
|
||||
- Added: Guava TestSuit
|
||||
- Fixed: HashCode and toString method would crash if the Object Key/Value was null
|
||||
- Added: AbstractTypeCollection now delegates the contains check to type-specific Collections if it detects it.
|
||||
- Fixed: Map.Entry toString wasn't writing values not like it should do.
|
||||
- Fixed: Set.hashCode now is the sum of the elements instead of a Unique HashCode based on the elements.
|
||||
- Fixed: Added missing NonNull Checks.
|
||||
- Fixed: Custom/OpenHashMap.containsValue implementation was wrong.
|
||||
- Fixed: Custom/OpenHashMap.compute/present/absent now works how it is specified in the Java Documentation
|
||||
- Fixed: Custom/OpenHashMap.merge/BulkMerge now works how it is specified in the Java Documentation
|
||||
- Fixed: Custom/Linked/OpenHashMap.keySet.remove was causing a infinite loop.
|
||||
- Fixed: Custom/Linked/OpenHashMap.entrySet.contains was not correctly comparing the entry.
|
||||
- Fixed: Custom/OpenHashMap.mapIterator now no longer crashes in certain cases.
|
||||
- Added: Custom/LinkedOpenHashMap now takes use of the improved Iterator it has for containsValue
|
||||
- Fixed: CustomOpenHashMap.keySet.forEach was basically putting out keys even if they were present
|
||||
- Fixed: ImmutableMaps issues thanks to the tests. Roughly the same as the rest of the maps
|
||||
- Fixed: RB/AVLTreeMaps issues. Roughly the same as the rest of the maps
|
||||
- Fixed: SubLists are now properly implemented.
|
||||
- Fixed: HashSet Iterator bugs now fixed... That was Painful.
|
||||
- Added: Tests for Lists and Sets
|
||||
|
||||
### Version 0.4.5
|
||||
- Added: removeAll/retainAll(Collection c, Consumer r) which receives all the elements that got deleted from the collection
|
||||
- Fixed: Supplier get function wasn't referencing original function.
|
||||
- Added: addIfPresent/Absent to lists
|
||||
- Added: distinct, limit and peek iterators
|
||||
- Added: Iterable's can now reduce its contents
|
||||
- Added: Better ForEach support for IterableWrappers so a Iterator chain is not created
|
||||
- Added: SwapRemove to Lists which moves the last element into the desired space to be deleted
|
||||
- Added: More Test cases
|
||||
|
||||
### Version 0.4.4
|
||||
- Fixed: ObjectArrayList.of was causing crashes because of a Poor implementation.
|
||||
- Added: Unsorted HashMaps/Sets now throw Concurrent exceptions if they were modified during a rehash.
|
||||
- Added: Array/Collection version of enqueue and enqueueFirst to PriorityQueues.
|
||||
- Added: fillBuffer function into PrimitiveLists which allow to optimize JavaNio buffers if needed.
|
||||
|
||||
### Version 0.4.3
|
||||
- Added: Wrapper now support the Optimized Lambda replacer functions to improve performance.
|
||||
- Added: FIFO Queue has now a minimum capacity and that is now checked more consistently.
|
||||
|
||||
### Version 0.4.2
|
||||
- Added: Lists/Sets/Maps/PriorityQueues are now copy-able. with the new copy() function.
|
||||
Note: subLists/subMaps/subSets or synchronize/unmodifiable wrappers do not support that function.
|
||||
- Fixed: PriorityQueues didn't implement: hashCode/equals/toString
|
||||
|
||||
### Version 0.4.1
|
||||
- Changed: ForEach with input now provides input, value instead of value, input, this improves the usage of method references greatly
|
||||
- Added: addAll with Array-types in collections.
|
||||
- Added: Java Iterator/Iterable support for Stream replacing methods
|
||||
- Added: Suppliers.
|
||||
- Added: SupplyIfAbsent. It is ComputeIfAbsent but using suppliers
|
||||
- Added: Count feature into Iterable
|
||||
- Fixed: A couple bugs with the new StreamReplacing functions in LinkedCollections Iterating to Infinity
|
||||
|
||||
### Version 0.4.0
|
||||
- Changed: Iterable specific helper functions were moved out of Iterators and moved into Iterables
|
||||
- Added: New Stream replacing functions: findFirst, matchesAny/All/None
|
||||
- Fixed: Compute/ComputeIfAbsent/ComputeIfPresent/Merge/BulkMerge in maps now behave like they should.
|
||||
- Added: Implementations for New Stream replacing functions.
|
||||
- Changed: Removed a lot of duplicated forEach implementations
|
||||
- Added: Flat/Mapping functions (to object) are now accessible to primitive maps.
|
||||
- Added: Filter function to Iterators/Iterables (Iterable implements it by default)
|
||||
- Changed: Cleanup of some variables/mappers
|
||||
- Added/Fixed: AVL/RBTreeMap got reworked and SubMaps work more properly now. Also forEach support got improved a lot
|
||||
- Added/Fixed: TreeSubSets (RB/AVL) got their functional implementations improved too.
|
||||
- Added: Pairs are now a thing. In Mutable/Immutable Form
|
||||
|
||||
### Version 0.3.6
|
||||
- Fixed: addAll with non Type Specific Lists was causing crashes.
|
||||
- Fixed/Changed: clearAndTrim's implementation was all over the place. In some cases causing crash scenarios.
|
||||
- Fixed: Wrappers didn't implement toString/equals/hashCode
|
||||
- Added: Tests for addAll Bug
|
||||
- Changed: Cleaned up CodeStyle as bugs were fixed.
|
||||
|
||||
### Version 0.3.5
|
||||
- Fixed: Simple Code Generator dependency was declared wrong. Its only needed for runtime. Not for Compilation.
|
||||
- Fixed: ObjectLists Crashed when a null was provided as a Comparator. (Unless the List was Initialized with the ClassType)
|
||||
- Fixed: LinkedLists didn't implement add(Object)
|
||||
- Fixed: Object Collections did have the JavaCollections deprecated as the Constructor. This should only be deprecated for Primitives
|
||||
- Added: Tests with 5k Random names for Object sorting.
|
||||
- Changed: Object Arrays no longer require a Comparable[] it just assumes now that the elements in the Array are Comparable
|
||||
- Fixed: Dependency to SimpleCodeGenerator should be no longer a thing. Because the resulting library doesn't need it only the builder does.
|
||||
|
||||
### Version 0.3.4
|
||||
- Fixed: ArrayLists didn't resize properly if they were empty.
|
||||
|
||||
|
||||
### Version 0.3.3
|
||||
- Added: Flat/Mapping function for Iterables/Iterators to help avoid streams for cleaner looking code
|
||||
- Fixed: AVLTrees pollFirst/Last is now keeping orders and is fixed
|
||||
- Fixed: AbstractCollection bulk adding methods now link to the specialized implementations.
|
||||
- Fixed: A bug with getElements in ArrayList.
|
||||
- Fixed: PriorityQueue remove/toArray function were renamed so they fit better with other interfaces. (remove => removeFirst and toArray uses a different genericType)
|
||||
- Added: LinkedList which is a List/PriorityDequeue/Stack which allows for more optimized use-cases and reduced boxing/unboxing.
|
||||
- Added: Tests for LinkedList
|
||||
|
||||
### Version 0.3.2
|
||||
- Fixed: Map.put wasn't referring to primitive variants.
|
||||
- Added: ImmutableList.
|
||||
- Added: Iterator pour function into a List or Array
|
||||
- Changed: Arrays Wrap is now accessible to Objects and now is ? extends TYPE instead of TYPE.
|
||||
- Added: OpenHashSets now implement foreach and have less overhead.
|
||||
- Added: ImmutableOpenHashSet that is not editable (is linked by default for fast iteration)
|
||||
- Added: CustomOpenHashSets now implement foreach and have less overhead.
|
||||
- Added: ImmutableOpenHashMap that is not editable (is linked by default for fast iteration)
|
||||
- Added: Maps can now be created through the interface.
|
||||
- Fixed: Lists.addElements(T...elements) was adding elements at the beginning of a list instead of the end.
|
||||
- Fixed: Bugs with the AVLTreeSet. And marked bugs with AVLTreeX that are still present.
|
||||
|
||||
### Version 0.3.1
|
||||
- Fixed: containsKey & containsValue in HashMaps were deprecated for Object Variants.
|
||||
- Fixed: HashMap wasn't deleting Keys & Values references when removing an Object
|
||||
- Fixed: AVLTreeMap didn't balance properly.
|
||||
- Changed: EnumMap no longer tries to access SharedSecrets since it's gone in Java 11
|
||||
- Added: HashMaps now implement ITrimmable
|
||||
- Fixed: AVLTreeSet didn't balance properly
|
||||
- Fixed: HashMaps & LinkedMaps weren't clearing references properly.
|
||||
|
||||
### Version 0.3.0 (Breaking 0.2.0)
|
||||
- Added: Stack.isEmpty was missing
|
||||
- Changed: remove/removeLast/enqueue/enqueueFirst no longer use Type Suffixes
|
||||
- Removed: Suffixes for unmodifiable & synchronize functions.
|
||||
- Changed: Primitive Stacks no longer depend on the base Stack class. Because seriously not needed.
|
||||
- Changed: PriorityQueues no longer extends Object Variant.
|
||||
- Changed: Maps.get function is no longer using Suffixes unless it's absolutely necessary.
|
||||
- Changed: Maps.remove function is no longer using Suffixes unless it's absolutely necessary.
|
||||
- Changed: ObjectList methods are no longer marked Deprecated even though it was for primitive ones.
|
||||
- Added: Shuffle & Reverse Methods.
|
||||
- Added: Concat Iterators.
|
||||
- Added: PriorityQueues
|
||||
44
EXTRAS.md
Normal file
44
EXTRAS.md
Normal file
@ -0,0 +1,44 @@
|
||||
### Extra Features
|
||||
|
||||
Primitive Collections comes with a few extra features that are disabled by default.
|
||||
These will be enabled as soon as they become relevant or never at all.
|
||||
|
||||
But some of these can be already unlocked when the target version changes.
|
||||
|
||||
If you compile the library for yourself you will automatically gain access to said features.
|
||||
|
||||
### Java17 Exclusive Features
|
||||
Java17 has some new features that can sadly not really be back-ported but the library still supports them if it is compiled with java17
|
||||
|
||||
- RandomGenerator: Java17 has added [RandomGenerator.class](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/util/random/RandomGenerator.html).
|
||||
This allows you to use custom random implementations without having to re-implement them yourself.
|
||||
|
||||
|
||||
### ModuleSettings
|
||||
Primitive Collections is a huge library.
|
||||
But maybe you only use like 5-10 different classes.
|
||||
Normally you would use tools like "Proguard" to get rid of classes that you don't use.
|
||||
But since a lot of classes have dependencies on each other this would only do so much.
|
||||
|
||||
This is where the [ModuleSettings](ModuleSettings.json) come into play.
|
||||
It allows you to turn off implementations as you wish and adjusts the code so that everything still works.
|
||||
|
||||
There are 3 layers of control inside of the ModuleSettings.
|
||||
- Modules directly at the top that turn off everything.
|
||||
- Type Specific configurations, where you can for example turn off everything that's "Long" based.
|
||||
- And then there is each type specific module settings.
|
||||
|
||||
Allowing for greater control without having to edit hundreds of lines of code.
|
||||
On top of that:
|
||||
Any Setting that isn't "Present" can be defined as "Enabled" or "Disabled" using the "Default" argument.
|
||||
If "Default" is missing, then it will just default to "Enabled".
|
||||
So if you want to disable just 1 thing you can keep that 1 thing and delete the rest of the Setting.
|
||||
It will still work the same.
|
||||
The default settings just come with everything so you can see what is controllable.
|
||||
Note: If a global Module setting is disabled but a dependency needs said Module, it will enable only the required classes.
|
||||
If a Module type (Float-Collection as example) is specifically disabled, the Dependency Resolver will throw errors telling you what's wrong.
|
||||
|
||||
How to compile the Code with the ModuleSettings enabled:
|
||||
```
|
||||
./gradlew.bat generateLimitSource build -x test
|
||||
```
|
||||
343
LICENSE
343
LICENSE
@ -1,208 +1,201 @@
|
||||
Apache License
|
||||
Apache License
|
||||
Version 2.0, December 2021
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
Version 2.0, January 2021
|
||||
|
||||
http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION,
|
||||
AND DISTRIBUTION
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and distribution
|
||||
as defined by Sections 1 through 9 of this document.
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright
|
||||
owner that is granting the License.
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities
|
||||
that control, are controlled by, or are under common control with that entity.
|
||||
For the purposes of this definition, "control" means (i) the power, direct
|
||||
or indirect, to cause the direction or management of such entity, whether
|
||||
by contract or otherwise, or (ii) ownership of fifty percent (50%) or more
|
||||
of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions
|
||||
granted by this License.
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including
|
||||
but not limited to software source code, documentation source, and configuration
|
||||
files.
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation
|
||||
or translation of a Source form, including but not limited to compiled object
|
||||
code, generated documentation, and conversions to other media types.
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form,
|
||||
made available under the License, as indicated by a copyright notice that
|
||||
is included in or attached to the work (an example is provided in the Appendix
|
||||
below).
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form,
|
||||
that is based on (or derived from) the Work and for which the editorial revisions,
|
||||
annotations, elaborations, or other modifications represent, as a whole, an
|
||||
original work of authorship. For the purposes of this License, Derivative
|
||||
Works shall not include works that remain separable from, or merely link (or
|
||||
bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version
|
||||
of the Work and any modifications or additions to that Work or Derivative
|
||||
Works thereof, that is intentionally submitted to Licensor for inclusion in
|
||||
the Work by the copyright owner or by an individual or Legal Entity authorized
|
||||
to submit on behalf of the copyright owner. For the purposes of this definition,
|
||||
"submitted" means any form of electronic, verbal, or written communication
|
||||
sent to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems, and
|
||||
issue tracking systems that are managed by, or on behalf of, the Licensor
|
||||
for the purpose of discussing and improving the Work, but excluding communication
|
||||
that is conspicuously marked or otherwise designated in writing by the copyright
|
||||
owner as "Not a Contribution."
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
|
||||
of whom a Contribution has been received by Licensor and subsequently incorporated
|
||||
within the Work.
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this
|
||||
License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive,
|
||||
no-charge, royalty-free, irrevocable copyright license to reproduce, prepare
|
||||
Derivative Works of, publicly display, publicly perform, sublicense, and distribute
|
||||
the Work and such Derivative Works in Source or Object form.
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this License,
|
||||
each Contributor hereby grants to You a perpetual, worldwide, non-exclusive,
|
||||
no-charge, royalty-free, irrevocable (except as stated in this section) patent
|
||||
license to make, have made, use, offer to sell, sell, import, and otherwise
|
||||
transfer the Work, where such license applies only to those patent claims
|
||||
licensable by such Contributor that are necessarily infringed by their Contribution(s)
|
||||
alone or by combination of their Contribution(s) with the Work to which such
|
||||
Contribution(s) was submitted. If You institute patent litigation against
|
||||
any entity (including a cross-claim or counterclaim in a lawsuit) alleging
|
||||
that the Work or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses granted to You
|
||||
under this License for that Work shall terminate as of the date such litigation
|
||||
is filed.
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the Work or
|
||||
Derivative Works thereof in any medium, with or without modifications, and
|
||||
in Source or Object form, provided that You meet the following conditions:
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
(a) You must give any other recipients of the Work or Derivative Works a copy
|
||||
of this License; and
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices stating that
|
||||
You changed the files; and
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works that You distribute,
|
||||
all copyright, patent, trademark, and attribution notices from the Source
|
||||
form of the Work, excluding those notices that do not pertain to any part
|
||||
of the Derivative Works; and
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its distribution,
|
||||
then any Derivative Works that You distribute must include a readable copy
|
||||
of the attribution notices contained within such NOTICE file, excluding those
|
||||
notices that do not pertain to any part of the Derivative Works, in at least
|
||||
one of the following places: within a NOTICE text file distributed as part
|
||||
of the Derivative Works; within the Source form or documentation, if provided
|
||||
along with the Derivative Works; or, within a display generated by the Derivative
|
||||
Works, if and wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and do not modify the
|
||||
License. You may add Your own attribution notices within Derivative Works
|
||||
that You distribute, alongside or as an addendum to the NOTICE text from the
|
||||
Work, provided that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
Copyright 2021 Speiger
|
||||
|
||||
You may add Your own copyright statement to Your modifications and may provide
|
||||
additional or different license terms and conditions for use, reproduction,
|
||||
or distribution of Your modifications, or for any such Derivative Works as
|
||||
a whole, provided Your use, reproduction, and distribution of the Work otherwise
|
||||
complies with the conditions stated in this License.
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise, any
|
||||
Contribution intentionally submitted for inclusion in the Work by You to the
|
||||
Licensor shall be under the terms and conditions of this License, without
|
||||
any additional terms or conditions. Notwithstanding the above, nothing herein
|
||||
shall supersede or modify the terms of any separate license agreement you
|
||||
may have executed with Licensor regarding such Contributions.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade names,
|
||||
trademarks, service marks, or product names of the Licensor, except as required
|
||||
for reasonable and customary use in describing the origin of the Work and
|
||||
reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or agreed to
|
||||
in writing, Licensor provides the Work (and each Contributor provides its
|
||||
Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied, including, without limitation, any warranties
|
||||
or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR
|
||||
A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness
|
||||
of using or redistributing the Work and assume any risks associated with Your
|
||||
exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory, whether
|
||||
in tort (including negligence), contract, or otherwise, unless required by
|
||||
applicable law (such as deliberate and grossly negligent acts) or agreed to
|
||||
in writing, shall any Contributor be liable to You for damages, including
|
||||
any direct, indirect, special, incidental, or consequential damages of any
|
||||
character arising as a result of this License or out of the use or inability
|
||||
to use the Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all other commercial
|
||||
damages or losses), even if such Contributor has been advised of the possibility
|
||||
of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing the Work
|
||||
or Derivative Works thereof, You may choose to offer, and charge a fee for,
|
||||
acceptance of support, warranty, indemnity, or other liability obligations
|
||||
and/or rights consistent with this License. However, in accepting such obligations,
|
||||
You may act only on Your own behalf and on Your sole responsibility, not on
|
||||
behalf of any other Contributor, and only if You agree to indemnify, defend,
|
||||
and hold each Contributor harmless for any liability incurred by, or claims
|
||||
asserted against, such Contributor by reason of your accepting any such warranty
|
||||
or additional liability. END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following boilerplate
|
||||
notice, with the fields enclosed by brackets "[]" replaced with your own identifying
|
||||
information. (Don't include the brackets!) The text should be enclosed in
|
||||
the appropriate comment syntax for the file format. We also recommend that
|
||||
a file or class name and description of purpose be included on the same "printed
|
||||
page" as the copyright notice for easier identification within third-party
|
||||
archives.
|
||||
|
||||
Copyright 2021 Speiger
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
|
||||
See the License for the specific language governing permissions and
|
||||
|
||||
limitations under the License.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
2625
ModulSettings.json
Normal file
2625
ModulSettings.json
Normal file
File diff suppressed because it is too large
Load Diff
101
README.md
101
README.md
@ -1,22 +1,93 @@
|
||||
# Primitive-Collections (To be Renamed)
|
||||

|
||||
[](https://jitpack.io/#Speiger/Primitive-Collections)
|
||||

|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||

|
||||

|
||||

|
||||
[](https://codecov.io/gh/Speiger/Primitive-Collections)
|
||||

|
||||
# Primitive-Collections
|
||||
This is a Simple Primitive Collections Library aimed to outperform Java's Collection Library and FastUtil.
|
||||
Both in Performance and Quality of Life Features.
|
||||
|
||||
This is a Simple Primitive Collections Library i started as a hobby Project.
|
||||
It is based on Java's Collection Library and FastUtil.
|
||||
But its focus is a different one.
|
||||
## Benchmarks
|
||||
Benchmarks can be found here: [[Charts]](https://github.com/Speiger/Primitive-Collections-Benchmarks/blob/master/BENCHMARKS-CHARTS.md), [[Tables]](https://github.com/Speiger/Primitive-Collections-Benchmarks/blob/master/BENCHMARKS.md)
|
||||
|
||||
## Main Features:
|
||||
ArraysList, HashSet/Map (Linked & HashControl), TreeSet/Map (RB & AVL), Priority Queue.
|
||||
## Special Features
|
||||
[Here](features.md) you find a set of features added to Primitive Collections.
|
||||
These are designed to improve performance or to provide Quality of Life.
|
||||
|
||||
[Here](EXTRAS.md) you also find features that can be used when you compile the library for yourself.
|
||||
These features are not used by default to have a wider range of compat, or require self compilation.
|
||||
Such as pruning classes that are not needed in your code.
|
||||
|
||||
## Main Features:
|
||||
- ArrayLists / LinkedLists / CopyOnWriteLists
|
||||
- HashSets/Maps (Linked & HashControl)
|
||||
- TreeSets/Maps (RB & AVL)
|
||||
- EnumMaps
|
||||
- Immutable Maps/Lists/Sets
|
||||
- ConcurrentHashMaps
|
||||
- Priority Queues
|
||||
- Streams & Functional Queries
|
||||
- Split/Iterators
|
||||
- Pairs
|
||||
- Unary/Functions
|
||||
- Suppliers
|
||||
- Bi/Consumers
|
||||
- AsyncBuilders
|
||||
|
||||
# Notes about Versions
|
||||
Any 0.x.0 version (Minor) can be reason for massive changes including API.
|
||||
To ensure that problems can be dealt with even if it is breaking the current API.
|
||||
|
||||
# How to install
|
||||
Using Jitpack Gradle
|
||||
```groovy
|
||||
repositories {
|
||||
maven {
|
||||
url = "https://jitpack.io"
|
||||
}
|
||||
}
|
||||
dependencies {
|
||||
implementation 'com.github.Speiger:Primitive-Collections:0.9.0'
|
||||
}
|
||||
```
|
||||
|
||||
Using Maven Central
|
||||
```groovy
|
||||
dependencies {
|
||||
implementation 'io.github.speiger:Primitive-Collections:0.9.0'
|
||||
}
|
||||
```
|
||||
|
||||
# SourceCode
|
||||
The generated Sourcecode can be automatically build,
|
||||
but if you want to just browse around in it.
|
||||
Check out the [Debug Branch](https://github.com/Speiger/Primitive-Collections/tree/debug/src/main/java/speiger/src/collections), which has the entire up to date code.
|
||||
|
||||
# Contributing
|
||||
If you want to contribute.
|
||||
This project is created using gradle and java and my Template Library only. Nothing extra.
|
||||
If you setup gradle the library will be downloaded automatically.
|
||||
|
||||
Where is everything stored?
|
||||
- Variables and ClassNames are defined [here](src/builder/java/speiger/src/builder/GlobalVariables.java)
|
||||
- Templates are stored [here](src/builder/resources/speiger/assets/collections/templates)
|
||||
- Tests can be found [here](src/test/java/speiger/src/collections)
|
||||
|
||||
Please if you want to contribute follow the [Rule-Sheet](RuleSheet.md). It keeps everything in line.
|
||||
|
||||
|
||||
# Guide
|
||||
# How to Build
|
||||
|
||||
The SourceCode can be already generated via:
|
||||
The SourceCode can be generated via:
|
||||
```
|
||||
/gradlew.bat generateSource
|
||||
to build the jar
|
||||
/gradlew.bat build
|
||||
do not combine the commands because they can not be executed at the same time.
|
||||
```
|
||||
|
||||
## Current Down Sides (Random order)
|
||||
- EnumMaps are only normal maps no Linked Support yet
|
||||
- Testing for Sub Maps/Sets/Lists are only in a very basic way tested
|
||||
- Documentation is only present at the lowest level for most cases and needs a typo fixing.
|
||||
to generate SourceCode and build the jar:
|
||||
```
|
||||
/gradlew.bat build
|
||||
```
|
||||
|
||||
15
SECURITY.md
Normal file
15
SECURITY.md
Normal file
@ -0,0 +1,15 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
Due to how the releases work, only the latest versions will be supported.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
If you have discovered a security vulnerability in this project, please report it privately.
|
||||
**Please refrain from posting in public issues.**
|
||||
This gives me time to process issues that are being found, and reduces the possibility of abuse while patches are being created.
|
||||
Please disclose it [here](https://github.com/Speiger/Primitive-Collections/security/advisories/new).
|
||||
|
||||
Please consider that this project is developed by a single person.
|
||||
So please provide a reasonable timeframe when reporting.
|
||||
350
build.gradle
350
build.gradle
@ -1,5 +1,7 @@
|
||||
plugins {
|
||||
id 'java-library'
|
||||
id "jacoco"
|
||||
// id "com.vanniktech.maven.publish" version "0.28.0"
|
||||
}
|
||||
|
||||
tasks.withType(JavaCompile) {
|
||||
@ -8,18 +10,27 @@ tasks.withType(JavaCompile) {
|
||||
|
||||
apply plugin: 'java'
|
||||
apply plugin: 'eclipse'
|
||||
apply plugin: 'maven-publish'
|
||||
apply plugin: 'signing'
|
||||
|
||||
repositories {
|
||||
jcenter()
|
||||
flatDir {
|
||||
dirs 'libs'
|
||||
mavenCentral()
|
||||
maven {
|
||||
url = "https://maven.speiger.com/repository/main"
|
||||
}
|
||||
}
|
||||
|
||||
archivesBaseName = 'Primitive Collections'
|
||||
version = 'Beta';
|
||||
version = RELEASE_VERSION;
|
||||
|
||||
sourceCompatibility = targetCompatibility = compileJava.sourceCompatibility = compileJava.targetCompatibility = '1.8'
|
||||
sourceCompatibility = targetCompatibility = compileJava.sourceCompatibility = compileJava.targetCompatibility = JavaVersion.current();
|
||||
|
||||
System.out.println("Java Version: "+compileJava.sourceCompatibility)
|
||||
|
||||
java {
|
||||
withJavadocJar()
|
||||
withSourcesJar()
|
||||
}
|
||||
|
||||
javadoc {
|
||||
options.tags = [ "implSpec", "note" ]
|
||||
@ -29,21 +40,9 @@ eclipse {
|
||||
classpath {
|
||||
downloadJavadoc = true
|
||||
downloadSources = true
|
||||
file {
|
||||
whenMerged {
|
||||
//Enforce a custom container and allowing access to the sun.misc package which is nessesary for EnumMaps
|
||||
entries.find{ it.kind == 'con' && it.path.startsWith('org.eclipse.jdt')}.path = 'org.eclipse.jdt.launching.JRE_CONTAINER';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
compileJava {
|
||||
options.compilerArgs << '-XDignore.symbol.file'
|
||||
options.fork = true // may not needed on 1.8
|
||||
options.forkOptions.executable = 'javac' // may not needed on 1.8
|
||||
}
|
||||
|
||||
sourceSets {
|
||||
builder
|
||||
}
|
||||
@ -53,8 +52,11 @@ configurations {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compile 'SimpleCodeGenerator:Simple Code Generator:1.0.1'
|
||||
builderImplementation 'com.google.code.gson:gson:2.10'
|
||||
builderImplementation 'de.speiger:Simple-Code-Generator:1.3.0'
|
||||
testImplementation 'junit:junit:4.12'
|
||||
testImplementation 'com.google.guava:guava-testlib:31.0.1-jre'
|
||||
|
||||
}
|
||||
|
||||
task generateSource(type: JavaExec) {
|
||||
@ -64,26 +66,314 @@ task generateSource(type: JavaExec) {
|
||||
main = 'speiger.src.builder.PrimitiveCollectionsBuilder'
|
||||
}
|
||||
|
||||
task javadocJar(type: Jar) {
|
||||
from javadoc
|
||||
classifier = 'javadoc'
|
||||
task generateGithubSource(type: JavaExec) {
|
||||
group = 'internal'
|
||||
description = 'Builds the sourcecode for Github Actions'
|
||||
classpath = sourceSets.builder.runtimeClasspath
|
||||
main = 'speiger.src.builder.PrimitiveCollectionsBuilder'
|
||||
args = ['silent']
|
||||
}
|
||||
|
||||
task srcJar(type: Jar) {
|
||||
from sourceSets.main.allSource
|
||||
classifier = 'sources'
|
||||
task forceGenerateSource(type: JavaExec) {
|
||||
group = 'internal'
|
||||
description = 'Builds the sourcecode forceful'
|
||||
classpath = sourceSets.builder.runtimeClasspath
|
||||
main = 'speiger.src.builder.PrimitiveCollectionsBuilder'
|
||||
args = ['force']
|
||||
}
|
||||
|
||||
task generateTestSource(type: JavaExec) {
|
||||
group = 'internal'
|
||||
description = 'Builds the sourcecode for the Tests'
|
||||
classpath = sourceSets.builder.runtimeClasspath
|
||||
main = 'speiger.src.builder.PrimitiveCollectionsBuilder'
|
||||
args = ['tests', 'silent']
|
||||
}
|
||||
|
||||
task forceGenerateTestSource(type: JavaExec) {
|
||||
group = 'internal'
|
||||
description = 'Builds the sourcecode for the Tests'
|
||||
classpath = sourceSets.builder.runtimeClasspath
|
||||
main = 'speiger.src.builder.PrimitiveCollectionsBuilder'
|
||||
args = ['tests', 'silent', 'force']
|
||||
}
|
||||
|
||||
task generateLimitSource(type: JavaExec) {
|
||||
group = 'internal'
|
||||
description = 'Builds the Sourcecode with the ModuleSettings.json applied'
|
||||
classpath = sourceSets.builder.runtimeClasspath
|
||||
main = 'speiger.src.builder.PrimitiveCollectionsBuilder'
|
||||
args = ['silent', 'load']
|
||||
}
|
||||
|
||||
compileJava.dependsOn generateGithubSource
|
||||
|
||||
javadoc.failOnError = false
|
||||
javadoc.options.memberLevel = JavadocMemberLevel.PUBLIC
|
||||
//javadoc.options.showAll()
|
||||
javadoc.options.quiet()
|
||||
|
||||
artifacts {
|
||||
archives javadocJar
|
||||
archives srcJar
|
||||
|
||||
task testBooleans(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Boolean Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.booleans.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
}
|
||||
|
||||
task testBytes(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Byte Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.bytes.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
maxParallelForks = testThreads as Integer
|
||||
}
|
||||
|
||||
task testShorts(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Short Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.shorts.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
maxParallelForks = testThreads as Integer
|
||||
}
|
||||
|
||||
task testChars(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Character Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.chars.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
maxParallelForks = testThreads as Integer
|
||||
}
|
||||
|
||||
task testInts(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Int Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.ints.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
maxParallelForks = testThreads as Integer
|
||||
}
|
||||
|
||||
task testLongs(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Long Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.longs.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
maxParallelForks = testThreads as Integer
|
||||
}
|
||||
|
||||
task testFloats(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Float Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.floats.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
maxParallelForks = testThreads as Integer
|
||||
}
|
||||
|
||||
task testDoubles(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Double Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.doubles.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
maxParallelForks = testThreads as Integer
|
||||
}
|
||||
|
||||
task testObjects(type: Test) {
|
||||
group 'tests'
|
||||
description 'Tests all Object Collections'
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
includeTestsMatching "speiger.src.tests.objects.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
maxParallelForks = testThreads as Integer
|
||||
}
|
||||
|
||||
if(System.getProperty("full_test_suite", "false").toBoolean()) {
|
||||
test.dependsOn testBooleans
|
||||
test.dependsOn testBytes
|
||||
test.dependsOn testShorts
|
||||
test.dependsOn testChars
|
||||
test.dependsOn testInts
|
||||
test.dependsOn testLongs
|
||||
test.dependsOn testFloats
|
||||
test.dependsOn testDoubles
|
||||
test.dependsOn testObjects
|
||||
}
|
||||
|
||||
test {
|
||||
useJUnit()
|
||||
}
|
||||
filter {
|
||||
excludeTestsMatching "speiger.src.testers.**.*"
|
||||
excludeTestsMatching "speiger.src.tests.**.*"
|
||||
excludeTestsMatching "tests.**.*"
|
||||
}
|
||||
useJUnit()
|
||||
ignoreFailures = true
|
||||
maxHeapSize = maxMemory
|
||||
}
|
||||
|
||||
jacocoTestReport {
|
||||
executionData fileTree(project.buildDir.absolutePath).include("jacoco/*.exec")
|
||||
reports {
|
||||
xml.required = true
|
||||
html.required = true
|
||||
csv.required = true
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
personal(MavenPublication) {
|
||||
pom {
|
||||
name = 'Primitive Collections'
|
||||
description = 'A Primitive Collection library that reduces memory usage and improves performance'
|
||||
url = 'https://github.com/Speiger/Primitive-Collections'
|
||||
version = project.version
|
||||
artifactId = project.archivesBaseName.replace(" ", "-")
|
||||
groupId = 'de.speiger'
|
||||
from components.java
|
||||
licenses {
|
||||
license {
|
||||
name = 'The Apache License, Version 2.0'
|
||||
url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
|
||||
}
|
||||
}
|
||||
developers {
|
||||
developer {
|
||||
id = 'speiger'
|
||||
name = 'Speiger'
|
||||
}
|
||||
}
|
||||
scm {
|
||||
url = 'https://github.com/Speiger/Primitive-Collections'
|
||||
}
|
||||
issueManagement {
|
||||
system = 'github'
|
||||
url = 'https://github.com/Speiger/Primitive-Collections/issues'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
repositories {
|
||||
maven {
|
||||
name = "Speiger_Maven"
|
||||
def auth = System.getenv("Speiger_Maven_Auth")?.split(';');
|
||||
url version.endsWith('SNAPSHOT') ? "https://maven.speiger.com/repository/debug" : "https://maven.speiger.com/repository/main"
|
||||
credentials(PasswordCredentials) {
|
||||
username auth?[0]
|
||||
password auth?[1]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tasks.withType(PublishToMavenRepository) {
|
||||
def predicate = provider {
|
||||
(repository == publishing.repositories.mavenCentral && publication == publishing.publications.maven) ||
|
||||
(repository != publishing.repositories.mavenCentral && publication != publishing.publications.maven)
|
||||
}
|
||||
onlyIf("publishing binary to the external repository, or binary and sources to the internal one") {
|
||||
predicate.get()
|
||||
}
|
||||
}
|
||||
|
||||
tasks.withType(PublishToMavenLocal) {
|
||||
def predicate = provider {
|
||||
publication == publishing.publications.personal
|
||||
}
|
||||
onlyIf("publishing binary and sources") {
|
||||
predicate.get()
|
||||
}
|
||||
}
|
||||
|
||||
//Maven central Start
|
||||
//Disabling due to java8 incompat, only needed to manually publishing anyways
|
||||
|
||||
//signing.useGpgCmd()
|
||||
//
|
||||
//import com.vanniktech.maven.publish.SonatypeHost
|
||||
//import com.vanniktech.maven.publish.JavaLibrary
|
||||
//import com.vanniktech.maven.publish.JavadocJar
|
||||
//
|
||||
//mavenPublishing {
|
||||
// configure(new JavaLibrary(new JavadocJar.None(), true))
|
||||
//}
|
||||
//
|
||||
//mavenPublishing {
|
||||
// publishToMavenCentral(SonatypeHost.CENTRAL_PORTAL)
|
||||
//
|
||||
// signAllPublications()
|
||||
// pom {
|
||||
// name = 'Primitive Collections'
|
||||
// description = 'A Primitive Collection library that reduces memory usage and improves performance'
|
||||
// url = 'https://github.com/Speiger/Primitive-Collections'
|
||||
// version = project.version
|
||||
// group = 'io.github.speiger'
|
||||
// licenses {
|
||||
// license {
|
||||
// name = 'The Apache License, Version 2.0'
|
||||
// url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// developers {
|
||||
// developer {
|
||||
// id = 'speiger'
|
||||
// name = 'Speiger'
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// scm {
|
||||
// connection = 'scm:git:git://github.com/Speiger/Primitive-Collections.git'
|
||||
// developerConnection = 'scm:git:ssh://github.com:Speiger/Primitive-Collections.git'
|
||||
// url = 'https://github.com/Speiger/Primitive-Collections'
|
||||
// }
|
||||
//
|
||||
// issueManagement {
|
||||
// system = 'github'
|
||||
// url = 'https://github.com/Speiger/Primitive-Collections/issues'
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
//
|
||||
402
features.md
Normal file
402
features.md
Normal file
@ -0,0 +1,402 @@
|
||||
## Quality Of Life Features
|
||||
New Specialized functions/classes that were added to increase performance and/or reduce allocations and/or for Quality of life.
|
||||
|
||||
|
||||
# Functions
|
||||
|
||||
Functions that increase performance or are quality of life in their nature.
|
||||
|
||||
<details>
|
||||
<summary>Iterable</summary>
|
||||
<p>
|
||||
|
||||
## Functional Functions
|
||||
|
||||
Java adds themselves a lot of functional functions like,
|
||||
- Stream:
|
||||
- Map/FlatMap
|
||||
- Filter/Distinct/Limit/Sorted
|
||||
- Count/FindFirst/Collect
|
||||
- Peek/ForEach/Reduce
|
||||
- anyMatch/allMatch/NoneMatch
|
||||
|
||||
that allows to process a collection in a functional way.
|
||||
But these require streams which have a lot of Overhead in their nature.
|
||||
|
||||
Luckily Primitive Collections adds replacement functions that provide the same functionality but with minimal overhead.
|
||||
Here are some examples:
|
||||
```java
|
||||
public ObjectIterable<Path> toPath(ObjectIterable<String> iterable) {
|
||||
	return iterable.map(Paths::get).filter(Files::exists);
|
||||
}
|
||||
|
||||
public Iterable<Path> toPath(Iterable<String> iterable) {
|
||||
	return ObjectIterables.map(iterable, Paths::get).filter(Files::exists);
|
||||
}
|
||||
|
||||
public int sum(IntIterable iterable) {
|
||||
return iterable.reduce(Integer::sum);
|
||||
}
|
||||
```
|
||||
|
||||
## AsyncAPI
|
||||
|
||||
The AsyncAPI is a Feature that simplifies the processing of Collections on a separate thread.
|
||||
It uses the same concept as Javas Stream API but uses the light weight Functions from Primitive Collections to achieve the same thing.
|
||||
Unlike Javas StreamAPI the AsyncAPI is always singleThreaded and more like Javas CompletableFuture, which you can await or let run Asynchronous.
|
||||
|
||||
The Goal is it to simplify the processing of Collections asynchronous.
|
||||
Especially on tasks which don't have to be finished instantly but can be processed on the side.
|
||||
|
||||
Here is a example of how the API works.
|
||||
```java
|
||||
public void processFiles(ObjectCollection<String> potentialFiles) {
|
||||
potentialFiles.asAsync()
|
||||
.map(Paths::get).filter(Files::exists) //Modifies the collection (Optional)
|
||||
.forEach(Files::delete) //Creates the action (Required)
|
||||
		.callback(T -> {}) //Callback on completion, still offthread (Optional)
|
||||
.execute() //Starts the task. (Required)
|
||||
}
|
||||
```
|
||||
</p>
|
||||
</details>
|
||||
<details>
|
||||
<summary>Collection</summary>
|
||||
<p>
|
||||
|
||||
These are functions specific to the Collections interface, stuff that everyone wished it was present to be in the first place.
|
||||
|
||||
## AddAll (Array)
|
||||
Adding Elements to a Collection usually requires either a for loop or a Arrays.wrap().
|
||||
This isn't an issue with Primitive Collections.
|
||||
```java
|
||||
public void addMonths(ObjectCollection<String> months) {
|
||||
	months.addAll("January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December");
|
||||
}
|
||||
|
||||
public void addElements(ObjectCollection<String> result, String[] elements) {
|
||||
result.addAll(elements, 0, 5); //elements, offset, length
|
||||
}
|
||||
```
|
||||
|
||||
## containsAny
|
||||
Everyone hates comparing if 2 collections have part of each other included.
|
||||
The solution usually requires for loops and keeping track if things were found or not.
|
||||
And every Java Developer had this issue at least once and wished for a clean solution.
|
||||
```java
|
||||
public boolean hasMonths(ObjectCollection<Month> target, Collection<Month> toFind) {
|
||||
return target.containsAny(toFind);
|
||||
}
|
||||
```
|
||||
|
||||
## Copy
|
||||
Collections get copied every now and then. There is only 2 ways that this happens.
|
||||
Javas Clone API or using Constructor that supports collections.
|
||||
Javas Clone API is kinda in a Zombie state, where it is supported or not. Its not really clear if you should use it or not.
|
||||
The Clone CloneNotSupportedException isn't helping either, causing more janky code.
|
||||
While a Constructor can only support so much and testing for every case isn't really viable.
|
||||
|
||||
So the decision was made to straight out not support clone and instead add a copy function which doesn't use a checked exception.
|
||||
It works exactly like the clone function. In a sense where it creates a shallow copy. (SubCollections do not work for obvious reasons)
|
||||
```java
|
||||
public IntCollection copy(IntCollection original) {
|
||||
return original.copy();
|
||||
}
|
||||
```
|
||||
|
||||
## Primitive Streams
|
||||
Since Javas Stream API is still really useful, even at its shortcomings, Primitive Collections provides easy access to it.
|
||||
Generic Streams and the closest Primitive Stream will be provided. So a FloatCollection goes to a DoubleStream.
|
||||
```java
|
||||
public IntStream createStream(IntCollection source) {
|
||||
return source.primitiveStream();
|
||||
}
|
||||
```
|
||||
|
||||
## RemoveAll/RetainAll with listener
|
||||
Ever wanted to use removeAll or retainAll and wanted to know which elements actually got deleted?
|
||||
The usual solution is to create a copy and then apply it to the original and cross reference them.
|
||||
Which leads to really messy code and just hasn't a clean solution.
|
||||
Luckily Primitive Collections has got you covered.
|
||||
```java
|
||||
public void removeInvalidFiles(ObjectCollections<Path> files, ObjectCollection<Path> toRemove) {
|
||||
files.removeAll(toRemove, T -> System.out.println(T));
|
||||
}
|
||||
|
||||
public void removeInvalidFiles(ObjectCollections<Path> files, ObjectCollection<Path> toKeep) {
|
||||
	files.retainAll(toKeep, T -> System.out.println(T));
|
||||
}
|
||||
```
|
||||
|
||||
## ToArray
|
||||
Primitive Collections supports primitive/generic toArray functions for its Primitive Collections.
|
||||
On top of that the Object side gets a Java9 function ported back to java8, which uses a functional Interface to create the backing array.
|
||||
```java
|
||||
public Integer[] toArray(IntCollection c) {
|
||||
	return c.toArray(new Integer[c.size()]);
|
||||
}
|
||||
|
||||
public int[] toArray(IntCollection c) {
|
||||
return c.toIntArray();
|
||||
}
|
||||
|
||||
public String[] toArray(ObjectCollection<String> c) {
|
||||
return c.toArray(String::new);
|
||||
}
|
||||
```
|
||||
</p>
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>List</summary>
|
||||
<p>
|
||||
|
||||
These functions are List specific functions, a couple of these are from FastUtil.
|
||||
|
||||
## add/get/remove/extractElements
|
||||
These functions are really useful helper functions. 3 of which are copied from FastUtil. (extract is from Primitive Collections)
|
||||
They are basically array forms of addAll, getAll, removeRange and removeAndGetRange. This is the simplest way to describe it.
|
||||
|
||||
Here some example:
|
||||
```java
|
||||
public void addAll(DoubleList list) {
|
||||
list.addElements(0D, 12.2D, 3.5D, 4.2D);
|
||||
}
|
||||
|
||||
public double[] getAll(DoubleList list, int amount) {
|
||||
double[] result = new double[amount];
|
||||
list.getElements(0, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
public void removeRange(FloatList list) {
|
||||
list.removeElements(5, 14);
|
||||
}
|
||||
|
||||
public float[] extractRange(FloatList list) {
|
||||
return list.extractElements(5, 14); //Returns the removed elements
|
||||
}
|
||||
```
|
||||
|
||||
## addIfPresent/addIfAbsent
|
||||
These two functions are simple helper functions that check internally if a element is present or absent before adding them to the List.
|
||||
Removing the need for a contains or indexOf check every time you want to add a element.
|
||||
While it is of course better to use a set, there is cases where this is still useful.
|
||||
|
||||
```java
|
||||
public void addElements(IntList list, int... numbersToAdd) {
|
||||
for(int e : numbersToAdd) {
|
||||
list.addIfAbsent(e);
|
||||
}
|
||||
}
|
||||
|
||||
public void addExisting(ObjectList<String> list, String... textToAdd) {
|
||||
for(String s : textToAdd) {
|
||||
list.addIfPresent(s);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## SwapRemove
|
||||
Lists when removing a Element shift usually the backing array to the left based to shrink the elements.
|
||||
While that isn't computational expensive with LinkedLists, it is with ArrayLists.
|
||||
Here comes swapRemove into play, which just removes the desired elements and instead of shifting left puts the last element in its place.
|
||||
This reduces the data copying required down to 1 element instead of an array.
|
||||
|
||||
```java
|
||||
public int remove(IntList elements, int indexToRemove) {
|
||||
return elements.swapRemove(indexToRemove);
|
||||
}
|
||||
```
|
||||
|
||||
## Unstable Sort (From FastUtil)
|
||||
Unstable Sort uses a Faster but not as stable sorting algorithm to sort the Collection.
|
||||
Unstable doesn't mean it crashes; it means equal elements are not guaranteed to keep their original relative order in the result.
|
||||
```java
|
||||
public void sort(List<Month> list, Comparator<Month> sorter) {
|
||||
list.unstableSort(sorter);
|
||||
}
|
||||
```
|
||||
</p>
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Map</summary>
|
||||
<p>
|
||||
|
||||
These functions are based on the Map interface. Useful functions you really would want.
|
||||
|
||||
## addTo/subFrom
|
||||
addTo (from FastUtil) and subFrom are mathematically functions that either add or subtract from the value of a given key.
|
||||
And if the key isn't present or would result in the default value it will either add or remove the entry from the Map. Given the circumstance.
|
||||
This is a really useful function and I wish FastUtil made it accessible by default but sadly it isn't.
|
||||
To simplify the explanation:
|
||||
- addTo if no element is present puts in the desired number, otherwise it sums up the two values.
|
||||
- subFrom if a element is present subtracts from it, if the element reaches the default value it removes the element from the map. If not present it will be ignored.
|
||||
```java
|
||||
public void addTo(Object2DoubleMap<Month> map, Month key, double averageTrainsRepaired) {
|
||||
map.addTo(key, averageTrainsRepaired);
|
||||
}
|
||||
|
||||
public void subFrom(Long2IntMap map, long key, int amount) {
|
||||
map.subFrom(key, amount);
|
||||
}
|
||||
```
|
||||
|
||||
## addToAll
|
||||
Simple bulk version of the addTo function since sometimes you want to merge 2 maps for summing.
|
||||
Especially if your work is multi-threaded this can become useful.
|
||||
|
||||
```java
|
||||
public void addTo(Object2DoubleMap<Month> map, Object2DoubleMap<Month> trainsRepaired) {
|
||||
map.addToAll(trainsRepaired);
|
||||
}
|
||||
```
|
||||
|
||||
## mergeAll
|
||||
This is a simple bulk version of merge since merging 2 maps is more frequent then people might think and leads to cleaner code too.
|
||||
```java
|
||||
public void merge(Long2ByteMap result, Long2ByteMap toMerge) {
|
||||
result.mergeAll(toMerge);
|
||||
}
|
||||
```
|
||||
|
||||
## putAll (Array)
|
||||
This allows to put keys and values as arrays instead of requiring a WrapperMap to insert the elements.
|
||||
Not as useful as the Collections.addAll variant but still really useful.
|
||||
```java
|
||||
public void putAll(Int2DoubleMap map, int[] keys, double[] values) {
|
||||
	map.putAll(keys, values, 2, 15); //keys, values, offset, length
|
||||
}
|
||||
```
|
||||
|
||||
## putAllIfAbsent
|
||||
putAll has this usual quirk where if a element is present it will replace the value, and sometimes this is not wanted.
|
||||
While putIfAbsent exists it has no real mass form and makes iterative solutions really uneasy to use.
|
||||
Here comes the helper function that gets rid of that problem.
|
||||
```java
|
||||
public void merge(Long2ObjectMap<String> regionFiles, Long2ObjectMap<String> toAdd) {
|
||||
regionFiles.putAllIfAbsent(toAdd);
|
||||
}
|
||||
```
|
||||
|
||||
## removeOrDefault
|
||||
getOrDefault is a really useful function that find use cases all the time.
|
||||
Sadly there is no removeOrDefault variant by default; while it has fewer use cases, it can still be useful every now and then.
|
||||
This function basically tries to remove an element; if it is not present it will just return your desired default.
|
||||
```java
|
||||
public Path removeCache(Long2ObjectMap<Path> caches, long key) {
|
||||
return caches.removeOrDefault(key, Paths.get("nuclearFun"));
|
||||
}
|
||||
```
|
||||
|
||||
## supplyIfAbsent
|
||||
This one is one of my favorites. computeIfAbsent is a really useful function.
|
||||
But in 90% of the cases I use it the value is a collection.
|
||||
This becomes really annoying since methodReferences are faster/cleaner then Lambdas in my opinion.
|
||||
supplyIfAbsent is basically computeIfAbsent but without a key, perfect for the default constructor of a collection.
|
||||
This is the whole reason it exists.
|
||||
```java
|
||||
public void example(Int2ObjectMap<List<String>> map, Int2ObjectMap<String> toAdd) {
|
||||
for(Entry<String> entry : toAdd.entrySet()) {
|
||||
map.supplyIfAbsent(entry.getKey(), ObjectArrayList::new).add(entry.getValue());
|
||||
}
|
||||
}
|
||||
```
|
||||
</p>
|
||||
</details>
|
||||
|
||||
# Interfaces
|
||||
|
||||
Interfaces that provide essential or quality of life features.
|
||||
|
||||
<details>
|
||||
<summary>ITrimmable</summary>
|
||||
<p>
|
||||
|
||||
The ITrimmable is an accessor interface that allows you to access a couple of helper functions to control the size of your collections.
|
||||
This was created for the constant casting requirement to implementations just to shrink collections which get annoying over time.
|
||||
|
||||
## trim
|
||||
This function basically trims down the backing implementation to use as little memory as required to store the elements in the collection.
|
||||
Optionally a desired minimum size can be provided as of how low it should go at worst.
|
||||
|
||||
## clearAndTrim
|
||||
when you want to reset a Collection completely you have 2 options. Clear it and then call trim, or recreate the collection.
|
||||
clearAndTrim solves this problem by clearing the collection and trimming it in one go, reducing overhead to achieve such a thing.
|
||||
|
||||
</p>
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>IArray</summary>
|
||||
<p>
|
||||
|
||||
IArray is an accessor interface that provides more access to collections by providing tools to grow your collection as needed.
|
||||
While putAll/addAll try to ensure that you have enough room for your elements, this is not really a solution for all cases.
|
||||
Sometimes you need to ensure the Collection is pre-initialized.
|
||||
IArray grants you that control.
|
||||
|
||||
There is also a type specific that provides you access to the backing array implementation of Lists for faster Iteration but that is a really specific case.
|
||||
|
||||
## ensureCapacity
|
||||
Ensures that your collection has enough storage for the elements you want to insert.
|
||||
|
||||
## elements (ITypeSpecificArray)
|
||||
Allows you access to the backing array of a List which is for people who know what they are doing.
|
||||
There is a lambda version of this function too which makes sure for synchronizedLists that you are the only one accessing the array.
|
||||
|
||||
</p>
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>OrderedMap</summary>
|
||||
<p>
|
||||
|
||||
The OrderedMap is a real edge case interface that was born for a need.
|
||||
FastUtil added functions that were like moveToFirst which were hardcoded to the implementation.
|
||||
They didn't fit into something like a SortedMap because the Set wasn't sorted.
|
||||
So OrderedMap was born, which isn't random but ordered in a specific way that can be changed.
|
||||
|
||||
## getAndMoveToFirst/getAndMoveToLast
|
||||
Returns the desired element and moves it to the first/last spot in the Map, moving the element that was at its spot after/before it.
|
||||
|
||||
## moveToFirst/moveToLast
|
||||
Moves the element if present to the first/last spot in the Map. Moving the element that was at its spot after/before it.
|
||||
Returns true if the element was actually moved.
|
||||
|
||||
## putAndMoveToFirst/putAndMoveToLast
|
||||
Adds the desired element and moves it to first/last spot in the Map. Moving the element that was at its spot after/before it.
|
||||
|
||||
## firstKey/lastKey (Optional poll)
|
||||
Provides access to the current first/last key of the Map.
|
||||
Optionally can be polled if desired.
|
||||
|
||||
## firstValue/lastValue
|
||||
Provides access to the current first/last value of the Map.
|
||||
|
||||
</p>
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>OrderedSet</summary>
|
||||
<p>
|
||||
|
||||
The OrderedSet is a real edge case interface that was born for a need.
|
||||
FastUtil added functions that were like moveToFirst which were hardcoded to the implementation.
|
||||
They didn't fit into something like a SortedSet because the Set wasn't sorted.
|
||||
So OrderedSet was born, which isn't random but ordered in a specific way that can be changed.
|
||||
|
||||
## addAndMoveToFirst/addAndMoveToLast
|
||||
Adds the desired element and moves it to first/last spot in the Collection. Moving the element that was at its spot after/before it.
|
||||
|
||||
## moveToFirst/moveToLast
|
||||
Moves the element if present to the first/last spot in the Collection. Moving the element that was at its spot after/before it.
|
||||
Returns true if the element was actually moved.
|
||||
|
||||
## first/last (Optional poll)
|
||||
Provides access to the current first/last element of the set.
|
||||
Optionally can be polled if desired.
|
||||
|
||||
</p>
|
||||
</details>
|
||||
6
gradle.properties
Normal file
6
gradle.properties
Normal file
@ -0,0 +1,6 @@
|
||||
org.gradle.jvmargs=-Xmx3G
|
||||
|
||||
maxMemory = 1024m
|
||||
testThreads = 4
|
||||
|
||||
RELEASE_VERSION = 0.9.0
|
||||
BIN
gradle/wrapper/gradle-wrapper.jar
vendored
Normal file
BIN
gradle/wrapper/gradle-wrapper.jar
vendored
Normal file
Binary file not shown.
10
gradle/wrapper/gradle-wrapper.properties
vendored
10
gradle/wrapper/gradle-wrapper.properties
vendored
@ -1,5 +1,5 @@
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-6.0-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.1-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
|
||||
6
jitpack.yml
Normal file
6
jitpack.yml
Normal file
@ -0,0 +1,6 @@
|
||||
jdk:
|
||||
- openjdk9
|
||||
install:
|
||||
- chmod +x ./gradlew
|
||||
- ./gradlew build publishToMavenLocal
|
||||
|
||||
Binary file not shown.
Binary file not shown.
@ -3,15 +3,15 @@ package speiger.src.builder;
|
||||
@SuppressWarnings("javadoc")
|
||||
public enum ClassType
|
||||
{
|
||||
BOOLEAN("boolean", "Boolean", "Boolean", "booleans", "BOOLEAN", "false"),
|
||||
BYTE("byte", "Byte", "Byte", "bytes", "BYTE", "(byte)0"),
|
||||
SHORT("short", "Short", "Short", "shorts", "SHORT", "(short)0"),
|
||||
CHAR("char", "Character", "Char", "chars", "CHAR", "(char)0"),
|
||||
INT("int", "Integer", "Int", "ints", "INT", "0"),
|
||||
LONG("long", "Long", "Long", "longs", "LONG", "0L"),
|
||||
FLOAT("float", "Float", "Float", "floats", "FLOAT", "0F"),
|
||||
DOUBLE("double", "Double", "Double", "doubles", "DOUBLE", "0D"),
|
||||
OBJECT("T", "T", "Object", "objects", "OBJECT", "null");
|
||||
BOOLEAN("boolean", "Boolean", "Boolean", "booleans", "BOOLEAN", "false", "false"),
|
||||
BYTE("byte", "Byte", "Byte", "bytes", "BYTE", "(byte)0", "(byte)-1"),
|
||||
SHORT("short", "Short", "Short", "shorts", "SHORT", "(short)0", "(short)-1"),
|
||||
CHAR("char", "Character", "Char", "chars", "CHAR", "(char)0", "(char)-1"),
|
||||
INT("int", "Integer", "Int", "ints", "INT", "0", "-1"),
|
||||
LONG("long", "Long", "Long", "longs", "LONG", "0L", "-1L"),
|
||||
FLOAT("float", "Float", "Float", "floats", "FLOAT", "0F", "-1F"),
|
||||
DOUBLE("double", "Double", "Double", "doubles", "DOUBLE", "0D", "-1D"),
|
||||
OBJECT("T", "T", "Object", "objects", "OBJECT", "null", "null");
|
||||
|
||||
String keyType;
|
||||
String classType;
|
||||
@ -19,8 +19,9 @@ public enum ClassType
|
||||
String pathType;
|
||||
String capType;
|
||||
String emptyValue;
|
||||
String invalidValue;
|
||||
|
||||
private ClassType(String keyType, String classType, String fileType, String pathType, String capType, String emptyValue)
|
||||
private ClassType(String keyType, String classType, String fileType, String pathType, String capType, String emptyValue, String invalidValue)
|
||||
{
|
||||
this.keyType = keyType;
|
||||
this.classType = classType;
|
||||
@ -28,6 +29,7 @@ public enum ClassType
|
||||
this.pathType = pathType;
|
||||
this.capType = capType;
|
||||
this.emptyValue = emptyValue;
|
||||
this.invalidValue = invalidValue;
|
||||
}
|
||||
|
||||
public String getKeyType()
|
||||
@ -50,6 +52,11 @@ public enum ClassType
|
||||
return classType;
|
||||
}
|
||||
|
||||
public String getClassPath()
|
||||
{
|
||||
return this == OBJECT ? "Object" : classType;
|
||||
}
|
||||
|
||||
public String getClassType(boolean value)
|
||||
{
|
||||
return value && this == OBJECT ? "V" : classType;
|
||||
@ -90,6 +97,11 @@ public enum ClassType
|
||||
return emptyValue;
|
||||
}
|
||||
|
||||
public String getInvalidValue()
|
||||
{
|
||||
return invalidValue;
|
||||
}
|
||||
|
||||
public boolean isObject()
|
||||
{
|
||||
return this == OBJECT;
|
||||
@ -102,7 +114,7 @@ public enum ClassType
|
||||
|
||||
public boolean needsCustomJDKType()
|
||||
{
|
||||
return this == BYTE || this == SHORT || this == CHAR || this == FLOAT;
|
||||
return this == BOOLEAN || this == BYTE || this == SHORT || this == CHAR || this == FLOAT;
|
||||
}
|
||||
|
||||
public boolean needsCast()
|
||||
@ -121,6 +133,12 @@ public enum ClassType
|
||||
}
|
||||
}
|
||||
|
||||
public String getApply(ClassType other) {
|
||||
if(other == BOOLEAN) return "test";
|
||||
if(other == ClassType.OBJECT) return "apply";
|
||||
return "applyAs"+other.getFileType();
|
||||
}
|
||||
|
||||
public String getEquals(boolean not)
|
||||
{
|
||||
switch(this)
|
||||
|
||||
@ -1,383 +0,0 @@
|
||||
package speiger.src.builder;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.function.UnaryOperator;
|
||||
|
||||
import speiger.src.builder.mappers.ArgumentMapper;
|
||||
import speiger.src.builder.mappers.IMapper;
|
||||
import speiger.src.builder.mappers.InjectMapper;
|
||||
import speiger.src.builder.mappers.LineMapper;
|
||||
import speiger.src.builder.mappers.SimpleMapper;
|
||||
import speiger.src.builder.processor.TemplateProcess;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class GlobalVariables
|
||||
{
|
||||
List<IMapper> operators = new ArrayList<>();
|
||||
Set<String> flags = new LinkedHashSet<>();
|
||||
ClassType type;
|
||||
ClassType valueType;
|
||||
|
||||
public GlobalVariables(ClassType type, ClassType subType)
|
||||
{
|
||||
this.type = type;
|
||||
valueType = subType;
|
||||
}
|
||||
|
||||
public GlobalVariables createVariables()
|
||||
{
|
||||
addSimpleMapper("VALUE_PACKAGE", valueType.getPathType());
|
||||
addSimpleMapper("PACKAGE", type.getPathType());
|
||||
addSimpleMapper("CLASS_TYPE", type.getClassType());
|
||||
addSimpleMapper("CLASS_VALUE_TYPE", valueType.getClassValueType());
|
||||
addSimpleMapper("KEY_TYPE", type.getKeyType());
|
||||
addSimpleMapper("VALUE_TYPE", valueType.getValueType());
|
||||
|
||||
addSimpleMapper("EMPTY_KEY_VALUE", type.getEmptyValue());
|
||||
addSimpleMapper("EMPTY_VALUE", valueType.getEmptyValue());
|
||||
|
||||
addSimpleMapper(" KEY_GENERIC_TYPE", type.isObject() ? "<"+type.getKeyType()+">" : "");
|
||||
addSimpleMapper(" KEY_KEY_GENERIC_TYPE", type.isObject() ? "<"+type.getKeyType()+", "+type.getKeyType()+">" : "");
|
||||
addSimpleMapper(" VALUE_GENERIC_TYPE", valueType.isObject() ? "<"+valueType.getValueType()+">" : "");
|
||||
addSimpleMapper(" VALUE_VALUE_GENERIC_TYPE", valueType.isObject() ? "<"+valueType.getValueType()+", "+valueType.getValueType()+">" : "");
|
||||
addSimpleMapper(" KEY_VALUE_GENERIC_TYPE", type.isObject() ? (valueType.isObject() ? "<"+type.getKeyType()+", "+valueType.getValueType()+">" : "<"+type.getKeyType()+">") : (valueType.isObject() ? "<"+valueType.getValueType()+">" : ""));
|
||||
addSimpleMapper(" KEY_VALUE_VALUE_GENERIC_TYPE", type.isObject() ? (valueType.isObject() ? "<"+type.getKeyType()+", "+valueType.getValueType()+", "+valueType.getValueType()+">" : "<"+type.getKeyType()+">") : (valueType.isObject() ? "<"+valueType.getValueType()+", "+valueType.getValueType()+">" : ""));
|
||||
addSimpleMapper(" NO_GENERIC_TYPE", type.isObject() ? "<?>" : "");
|
||||
addSimpleMapper(" NO_KV_GENERIC_TYPE", type.isObject() ? (valueType.isObject() ? "<?, ?>" : "<?>") : valueType.isObject() ? "<?>" : "");
|
||||
addSimpleMapper(" KEY_COMPAREABLE_TYPE", type.isObject() ? "<"+type.getKeyType()+" extends Comparable<T>>" : "");
|
||||
addSimpleMapper(" KEY_SUPER_GENERIC_TYPE", type.isObject() ? "<? super "+type.getKeyType()+">" : "");
|
||||
addSimpleMapper(" VALUE_SUPER_GENERIC_TYPE", valueType.isObject() ? "<? super "+valueType.getValueType()+">" : "");
|
||||
addSimpleMapper(" KEY_VALUE_SUPER_GENERIC_TYPE", type.isObject() ? (valueType.isObject() ? "<? super "+type.getKeyType()+", ? super "+valueType.getValueType()+">" : "<? super "+type.getKeyType()+">") : (valueType.isObject() ? "<? super "+valueType.getValueType()+">" : ""));
|
||||
addSimpleMapper(" KEY_ENUM_VALUE_GENERIC_TYPE", type.isObject() ? (valueType.isObject() ? "<"+type.getKeyType()+" extends Enum<"+type.getKeyType()+">, "+valueType.getValueType()+">" : "<"+type.getKeyType()+" extends Enum<"+type.getKeyType()+">>") : (valueType.isObject() ? "<"+valueType.getValueType()+">" : ""));
|
||||
addSimpleMapper(" KEY_VALUE_ENUM_GENERIC_TYPE", type.isObject() ? (valueType.isObject() ? "<"+type.getKeyType()+", "+valueType.getValueType()+" extends Enum<"+valueType.getValueType()+">>" : "<"+type.getKeyType()+">") : (valueType.isObject() ? "<"+valueType.getValueType()+" extends Enum<"+valueType.getValueType()+">>" : ""));
|
||||
|
||||
addSimpleMapper(" GENERIC_KEY_BRACES", type.isObject() ? " <"+type.getKeyType()+">" : "");
|
||||
addSimpleMapper(" GENERIC_VALUE_BRACES", type.isObject() ? " <"+valueType.getValueType()+">" : "");
|
||||
addSimpleMapper(" GENERIC_KEY_VALUE_BRACES", type.isObject() ? (valueType.isObject() ? " <"+type.getKeyType()+", "+valueType.getValueType()+">" : " <"+type.getKeyType()+">") : (valueType.isObject() ? " <"+valueType.getValueType()+">" : ""));
|
||||
addSimpleMapper(" COMPAREABLE_KEY_BRACES", type.isObject() ? " <"+type.getKeyType()+" extends Comparable<T>>" : "");
|
||||
addSimpleMapper("KV_BRACES", type.isObject() || valueType.isObject() ? "<>" : "");
|
||||
addSimpleMapper("BRACES", type.isObject() ? "<>" : "");
|
||||
if(type.needsCustomJDKType())
|
||||
{
|
||||
addSimpleMapper("JAVA_TYPE", type.getCustomJDKType().getKeyType());
|
||||
addSimpleMapper("SANITY_CAST", "castTo"+type.getFileType());
|
||||
}
|
||||
addSimpleMapper("JAVA_CLASS", type.getCustomJDKType().getClassType());
|
||||
if(valueType.needsCustomJDKType())
|
||||
{
|
||||
addSimpleMapper("SANITY_CAST_VALUE", "castTo"+valueType.getFileType());
|
||||
}
|
||||
addComment("@ArrayType", "@param <%s> the type of array that the operation should be applied");
|
||||
addComment("@Type", "@param <%s> the type of elements maintained by this Collection");
|
||||
addValueComment("@ValueArrayType", "@param <%s> the type of array that the operation should be applied");
|
||||
addValueComment("@ValueType", "@param <%s> the type of elements maintained by this Collection");
|
||||
addAnnontion("@PrimitiveOverride", "@Override");
|
||||
addSimpleMapper("@PrimitiveDoc", "");
|
||||
addAnnontion("@Primitive", "@Deprecated");
|
||||
return this;
|
||||
}
|
||||
|
||||
public GlobalVariables createHelperVariables()
|
||||
{
|
||||
createHelperVars(type, false, "KEY");
|
||||
createHelperVars(valueType, true, "VALUE");
|
||||
return this;
|
||||
}
|
||||
|
||||
private void createHelperVars(ClassType type, boolean value, String fix)
|
||||
{
|
||||
addArgumentMapper("EQUALS_"+fix+"_TYPE", "Objects.equals(%2$s, "+(type.isObject() ? "%1$s" : fix+"_TO_OBJ(%1$s)")+")").removeBraces();
|
||||
addInjectMapper(fix+"_EQUALS_NOT_NULL", type.getComparableValue()+" != "+(type.isPrimitiveBlocking() || type.needsCast() ? type.getEmptyValue() : "0")).removeBraces();
|
||||
addInjectMapper(fix+"_EQUALS_NULL", type.getComparableValue()+" == "+(type.isPrimitiveBlocking() || type.needsCast() ? type.getEmptyValue() : "0")).removeBraces();
|
||||
addArgumentMapper(fix+"_EQUALS_NOT", type.getEquals(true)).removeBraces();
|
||||
addArgumentMapper(fix+"_EQUALS", type.getEquals(false)).removeBraces();
|
||||
|
||||
addArgumentMapper("COMPAREABLE_TO_"+fix, type.isObject() ? "((Comparable<"+type.getKeyType(value)+">)%1$s).compareTo(("+type.getKeyType(value)+")%2$s)" : type.getClassType(value)+".compare(%1$s, %2$s)").removeBraces();
|
||||
addArgumentMapper("COMPARE_TO_"+fix, type.isObject() ? "%1$s.compareTo(%2$s)" : type.getClassType(value)+".compare(%1$s, %2$s)").removeBraces();
|
||||
|
||||
addInjectMapper(fix+"_TO_OBJ", type.isObject() ? "%s" : type.getClassType(value)+".valueOf(%s)").removeBraces();
|
||||
addInjectMapper("OBJ_TO_"+fix, type.isObject() ? "%s" : "%s."+type.getKeyType(value)+"Value()").removeBraces();
|
||||
addInjectMapper("CLASS_TO_"+fix, type.isObject() ? "("+type.getKeyType(value)+")%s" : "(("+type.getClassType(value)+")%s)."+type.getKeyType(value)+"Value()").removeBraces();
|
||||
|
||||
addInjectMapper(fix+"_TO_HASH", type.isObject() ? "%s.hashCode()" : type.getClassType(value)+".hashCode(%s)").removeBraces();
|
||||
addInjectMapper(fix+"_TO_STRING", type.isObject() ? "%s.toString()" : type.getClassType(value)+".toString(%s)").removeBraces();
|
||||
|
||||
addSimpleMapper("CAST_"+fix+"_ARRAY ", type.isObject() ? "("+fix+"_TYPE[])" : "");
|
||||
addSimpleMapper("EMPTY_"+fix+"_ARRAY", type.isObject() ? "("+fix+"_TYPE[])ARRAYS.EMPTY_ARRAY" : "ARRAYS.EMPTY_ARRAY");
|
||||
addInjectMapper("NEW_"+fix+"_ARRAY", type.isObject() ? "("+fix+"_TYPE[])new Object[%s]" : "new "+fix+"_TYPE[%s]").removeBraces();
|
||||
addInjectMapper("NEW_CLASS"+(value ? "_VALUE" : "")+"_ARRAY", type.isObject() ? "(CLASS_TYPE[])new Object[%s]" : "new CLASS_TYPE[%s]").removeBraces();
|
||||
}
|
||||
|
||||
public GlobalVariables createPreFunctions()
|
||||
{
|
||||
addSimpleMapper("ENTRY_SET", type.getFileType().toLowerCase()+"2"+valueType.getFileType()+"EntrySet");
|
||||
return this;
|
||||
}
|
||||
|
||||
public GlobalVariables createClassTypes()
|
||||
{
|
||||
addSimpleMapper("JAVA_PREDICATE", type.isPrimitiveBlocking() ? "" : type.getCustomJDKType().getFileType()+"Predicate");
|
||||
addSimpleMapper("JAVA_CONSUMER", type.isPrimitiveBlocking() ? "" : "java.util.function."+type.getCustomJDKType().getFileType()+"Consumer");
|
||||
addSimpleMapper("JAVA_FUNCTION", type.getFunctionClass(valueType));
|
||||
addSimpleMapper("JAVA_BINARY_OPERATOR", type == ClassType.BOOLEAN ? "" : (type.isObject() ? "java.util.function.BinaryOperator" : "java.util.function."+type.getCustomJDKType().getFileType()+"BinaryOperator"));
|
||||
addSimpleMapper("JAVA_UNARY_OPERATOR", type.isObject() ? "BinaryOperator" : type == ClassType.BOOLEAN ? "" : type.getCustomJDKType().getFileType()+"UnaryOperator");
|
||||
addSimpleMapper("JAVA_SPLIT_ITERATOR", type.isPrimitiveBlocking() ? "Spliterator" : "Of"+type.getCustomJDKType().getFileType());
|
||||
addSimpleMapper("JAVA_STREAM", type.isPrimitiveBlocking() ? "" : type.getCustomJDKType().getFileType()+"Stream");
|
||||
|
||||
//Final Classes
|
||||
addClassMapper("ARRAY_LIST", "ArrayList");
|
||||
addClassMapper("ARRAY_FIFO_QUEUE", "ArrayFIFOQueue");
|
||||
addClassMapper("ARRAY_PRIORITY_QUEUE", "ArrayPriorityQueue");
|
||||
addClassMapper("HEAP_PRIORITY_QUEUE", "HeapPriorityQueue");
|
||||
addClassMapper("LINKED_CUSTOM_HASH_SET", "LinkedOpenCustomHashSet");
|
||||
addClassMapper("LINKED_HASH_SET", "LinkedOpenHashSet");
|
||||
addClassMapper("CUSTOM_HASH_SET", "OpenCustomHashSet");
|
||||
addClassMapper("HASH_SET", "OpenHashSet");
|
||||
addBiClassMapper("LINKED_CUSTOM_HASH_MAP", "LinkedOpenCustomHashMap", "2");
|
||||
addBiClassMapper("LINKED_HASH_MAP", "LinkedOpenHashMap", "2");
|
||||
addBiClassMapper("CUSTOM_HASH_MAP", "OpenCustomHashMap", "2");
|
||||
addBiClassMapper("AVL_TREE_MAP", "AVLTreeMap", "2");
|
||||
addBiClassMapper("RB_TREE_MAP", "RBTreeMap", "2");
|
||||
addFunctionValueMappers("ENUM_MAP", valueType.isObject() ? "Enum2ObjectMap" : "Enum2%sMap");
|
||||
addBiClassMapper("HASH_MAP", "OpenHashMap", "2");
|
||||
addBiClassMapper("ARRAY_MAP", "ArrayMap", "2");
|
||||
addClassMapper("RB_TREE_SET", "RBTreeSet");
|
||||
addClassMapper("AVL_TREE_SET", "AVLTreeSet");
|
||||
addClassMapper("ARRAY_SET", "ArraySet");
|
||||
|
||||
//Abstract Classes
|
||||
addAbstractMapper("ABSTRACT_COLLECTION", "Abstract%sCollection");
|
||||
addAbstractMapper("ABSTRACT_SET", "Abstract%sSet");
|
||||
addAbstractMapper("ABSTRACT_LIST", "Abstract%sList");
|
||||
addAbstractBiMapper("ABSTRACT_MAP", "Abstract%sMap", "2");
|
||||
addClassMapper("SUB_LIST", "SubList");
|
||||
|
||||
//Helper Classes
|
||||
addClassMapper("LISTS", "Lists");
|
||||
addClassMapper("SETS", "Sets");
|
||||
addClassMapper("COLLECTIONS", "Collections");
|
||||
addClassMapper("ARRAYS", "Arrays");
|
||||
addClassMapper("SPLIT_ITERATORS", "Splititerators");
|
||||
addClassMapper("ITERATORS", "Iterators");
|
||||
addBiClassMapper("MAPS", "Maps", "2");
|
||||
|
||||
//Interfaces
|
||||
addClassMapper("LIST_ITERATOR", "ListIterator");
|
||||
addClassMapper("BI_ITERATOR", "BidirectionalIterator");
|
||||
addBiClassMapper("BI_CONSUMER", "Consumer", "");
|
||||
addClassMapper("SPLIT_ITERATOR", "Splititerator");
|
||||
addClassMapper("ITERATOR", "Iterator");
|
||||
addClassMapper("ITERABLE", "Iterable");
|
||||
addClassMapper("COLLECTION", "Collection");
|
||||
addBiClassMapper("FUNCTION", "Function", "2");
|
||||
addClassMapper("LIST_ITER", "ListIter");
|
||||
addClassMapper("LIST", "List");
|
||||
addBiClassMapper("NAVIGABLE_MAP", "NavigableMap", "2");
|
||||
addBiClassMapper("SORTED_MAP", "SortedMap", "2");
|
||||
addBiClassMapper("MAP", "Map", "2");
|
||||
addClassMapper("NAVIGABLE_SET", "NavigableSet");
|
||||
addClassMapper("PRIORITY_QUEUE", "PriorityQueue");
|
||||
addClassMapper("PRIORITY_DEQUEUE", "PriorityDequeue");
|
||||
addClassMapper("SORTED_SET", "SortedSet");
|
||||
addClassMapper("SET", "Set");
|
||||
addClassMapper("STRATEGY", "Strategy");
|
||||
addClassMapper("STACK", "Stack");
|
||||
addBiClassMapper("UNARY_OPERATOR", "UnaryOperator", "");
|
||||
if(type.isObject())
|
||||
{
|
||||
if(!valueType.isObject())
|
||||
{
|
||||
addSimpleMapper("VALUE_CONSUMER", valueType.getFileType()+"Consumer");
|
||||
}
|
||||
addSimpleMapper("CONSUMER", "Consumer");
|
||||
addSimpleMapper("COMPARATOR", "Comparator");
|
||||
addSimpleMapper("IARRAY", "IObjectArray");
|
||||
}
|
||||
else
|
||||
{
|
||||
addClassMapper("CONSUMER", "Consumer");
|
||||
addClassMapper("COMPARATOR", "Comparator");
|
||||
addFunctionMappers("IARRAY", "I%sArray");
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public GlobalVariables createFunctions()
|
||||
{
|
||||
addSimpleMapper("APPLY_VALUE", valueType.isObject() ? "apply" : "applyAs"+valueType.getNonFileType());
|
||||
addSimpleMapper("APPLY_CAST", "applyAs"+type.getCustomJDKType().getNonFileType());
|
||||
addSimpleMapper("APPLY", type.isObject() ? "apply" : "applyAs"+type.getNonFileType());
|
||||
addFunctionValueMappers("COMPUTE_IF_ABSENT", "compute%sIfAbsent");
|
||||
addFunctionValueMappers("COMPUTE_IF_PRESENT", "compute%sIfPresent");
|
||||
addFunctionValueMapper("COMPUTE", "compute");
|
||||
addFunctionMapper("ENQUEUE_FIRST", "enqueueFirst");
|
||||
addFunctionMapper("ENQUEUE", "enqueue");
|
||||
addFunctionMapper("DEQUEUE_LAST", "dequeueLast");
|
||||
addFunctionMapper("DEQUEUE", "dequeue");
|
||||
addFunctionMappers("POLL_FIRST_ENTRY_KEY", "pollFirst%sKey");
|
||||
addFunctionMappers("POLL_LAST_ENTRY_KEY", "pollLast%sKey");
|
||||
addFunctionMapper("POLL_FIRST_KEY", "pollFirst");
|
||||
addFunctionMapper("POLL_LAST_KEY", "pollLast");
|
||||
addFunctionMappers("FIRST_ENTRY_KEY", "first%sKey");
|
||||
addFunctionValueMappers("FIRST_ENTRY_VALUE", "first%sValue");
|
||||
addFunctionMapper("FIRST_KEY", "first");
|
||||
addFunctionMappers("LAST_ENTRY_KEY", "last%sKey");
|
||||
addFunctionValueMappers("LAST_ENTRY_VALUE", "last%sValue");
|
||||
addFunctionMappers("ENTRY_KEY", "get%sKey");
|
||||
addFunctionValueMappers("ENTRY_VALUE", "get%sValue");
|
||||
addFunctionMapper("GET_KEY", "get");
|
||||
addFunctionValueMapper("GET_VALUE", valueType.isObject() ? "getObject" : "get");
|
||||
addFunctionMapper("LAST_KEY", "last");
|
||||
addFunctionValueMapper("MERGE", "merge");
|
||||
addFunctionMapper("NEXT", "next");
|
||||
addFunctionMapper("PREVIOUS", "previous");
|
||||
addFunctionMapper("PEEK", "peek");
|
||||
addFunctionMapper("POP", "pop");
|
||||
addFunctionMapper("PUSH", "push");
|
||||
addFunctionMapper("REMOVE_KEY", "rem");
|
||||
addFunctionMapper("REMOVE_LAST", "removeLast");
|
||||
addFunctionMapper("REMOVE", "remove");
|
||||
addFunctionValueMappers("REPLACE_VALUES", valueType.isObject() ? "replaceObjects" : "replace%ss");
|
||||
addFunctionMappers("REPLACE", type.isObject() ? "replaceObjects" : "replace%ss");
|
||||
addFunctionMappers("SORT", "sort%ss");
|
||||
addSimpleMapper("NEW_STREAM", type.isPrimitiveBlocking() ? "" : type.getCustomJDKType().getKeyType()+"Stream");
|
||||
addSimpleMapper("TO_ARRAY", "to"+type.getNonFileType()+"Array");
|
||||
addFunctionMapper("TOP", "top");
|
||||
return this;
|
||||
}
|
||||
|
||||
public GlobalVariables createFlags()
|
||||
{
|
||||
flags.add("TYPE_"+type.getCapType());
|
||||
flags.add("VALUE_"+valueType.getCapType());
|
||||
if(type == valueType) flags.add("SAME_TYPE");
|
||||
if(type.hasFunction(valueType)) flags.add("JDK_FUNCTION");
|
||||
if(!type.needsCustomJDKType()) flags.add("JDK_TYPE");
|
||||
if(!type.isPrimitiveBlocking()) flags.add("PRIMITIVES");
|
||||
if(!valueType.isPrimitiveBlocking()) flags.add("VALUE_PRIMITIVES");
|
||||
if(valueType.needsCustomJDKType()) flags.add("JDK_VALUE");
|
||||
return this;
|
||||
}
|
||||
|
||||
public TemplateProcess create(String fileName, String splitter, boolean valueOnly)
|
||||
{
|
||||
TemplateProcess process = new TemplateProcess(String.format(fileName+".java", (splitter != null ? type.getFileType()+splitter+valueType.getFileType() : (valueOnly ? valueType : type).getFileType())));
|
||||
process.setPathBuilder(new PathBuilder(type.getPathType()));
|
||||
process.addFlags(flags);
|
||||
process.addMappers(operators);
|
||||
return process;
|
||||
}
|
||||
|
||||
public ClassType getType()
|
||||
{
|
||||
return type;
|
||||
}
|
||||
|
||||
private void addClassMapper(String pattern, String replacement)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"[VALUE_"+pattern+"]", "VALUE_"+pattern, valueType.getFileType()+replacement));
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, type.getFileType()+replacement));
|
||||
}
|
||||
|
||||
private void addBiClassMapper(String pattern, String replacement, String splitter)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"[KEY_"+pattern+"]", "KEY_"+pattern, type.getFileType()+splitter+type.getFileType()+replacement));
|
||||
operators.add(new SimpleMapper(type.name()+"[VALUE_"+pattern+"]", "VALUE_"+pattern, valueType.getFileType()+splitter+valueType.getFileType()+replacement));
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, type.getFileType()+splitter+valueType.getFileType()+replacement));
|
||||
}
|
||||
|
||||
private void addAbstractMapper(String pattern, String replacement)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"[VALUE_"+pattern+"]", "VALUE_"+pattern, String.format(replacement, valueType.getFileType())));
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, String.format(replacement, type.getFileType())));
|
||||
}
|
||||
|
||||
private void addAbstractBiMapper(String pattern, String replacement, String splitter)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, String.format(replacement, type.getFileType()+splitter+valueType.getFileType())));
|
||||
}
|
||||
|
||||
private void addFunctionMapper(String pattern, String replacement)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"[VALUE_"+pattern+"]", "VALUE_"+pattern, replacement+valueType.getNonFileType()));
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, replacement+type.getNonFileType()));
|
||||
}
|
||||
|
||||
private void addFunctionValueMapper(String pattern, String replacement)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, replacement+valueType.getNonFileType()));
|
||||
}
|
||||
|
||||
private void addFunctionMappers(String pattern, String replacement)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"[VALUE_"+pattern+"]", "VALUE_"+pattern, String.format(replacement, valueType.getNonFileType())));
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, String.format(replacement, type.getNonFileType())));
|
||||
}
|
||||
|
||||
private void addFunctionValueMappers(String pattern, String replacement)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, String.format(replacement, valueType.getNonFileType())));
|
||||
}
|
||||
|
||||
private void addSimpleMapper(String pattern, String replacement)
|
||||
{
|
||||
operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, replacement));
|
||||
}
|
||||
|
||||
private void addAnnontion(String pattern, String value)
|
||||
{
|
||||
if(type == ClassType.OBJECT) operators.add(new LineMapper(type.name()+"["+pattern+"]", pattern));
|
||||
else operators.add(new SimpleMapper(type.name()+"["+pattern+"]", pattern, value));
|
||||
}
|
||||
|
||||
private void addComment(String pattern, String value)
|
||||
{
|
||||
if(type == ClassType.OBJECT) operators.add(new InjectMapper(type.name()+"["+pattern+"]", pattern, value).removeBraces());
|
||||
else operators.add(new LineMapper(type.name()+"["+pattern+"]", pattern));
|
||||
}
|
||||
|
||||
private void addValueComment(String pattern, String value)
|
||||
{
|
||||
if(valueType == ClassType.OBJECT) operators.add(new InjectMapper(valueType.name()+"["+pattern+"]", pattern, value).removeBraces());
|
||||
else operators.add(new LineMapper(valueType.name()+"["+pattern+"]", pattern));
|
||||
}
|
||||
|
||||
private InjectMapper addInjectMapper(String pattern, String replacement)
|
||||
{
|
||||
InjectMapper mapper = new InjectMapper(type.name()+"["+pattern+"]", pattern, replacement);
|
||||
operators.add(mapper);
|
||||
return mapper;
|
||||
}
|
||||
|
||||
private ArgumentMapper addArgumentMapper(String pattern, String replacement)
|
||||
{
|
||||
return addArgumentMapper(pattern, replacement, ", ");
|
||||
}
|
||||
|
||||
private ArgumentMapper addArgumentMapper(String pattern, String replacement, String splitter)
|
||||
{
|
||||
ArgumentMapper mapper = new ArgumentMapper(type.name()+"["+pattern+"]", pattern, replacement, splitter);
|
||||
operators.add(mapper);
|
||||
return mapper;
|
||||
}
|
||||
|
||||
class PathBuilder implements UnaryOperator<Path>
|
||||
{
|
||||
String before;
|
||||
|
||||
public PathBuilder(String before)
|
||||
{
|
||||
this.before = before;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Path apply(Path t)
|
||||
{
|
||||
return t.subpath(0, 6).resolve(before).resolve(t.subpath(6, t.getNameCount()));
|
||||
}
|
||||
}
|
||||
}
|
||||
155
src/builder/java/speiger/src/builder/ModulePackage.java
Normal file
155
src/builder/java/speiger/src/builder/ModulePackage.java
Normal file
@ -0,0 +1,155 @@
|
||||
package speiger.src.builder;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.function.UnaryOperator;
|
||||
|
||||
import speiger.src.builder.mappers.IMapper;
|
||||
import speiger.src.builder.processor.TemplateProcess;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class ModulePackage
|
||||
{
|
||||
private static final BiConsumer<String, RequiredType> VOID = (K, V) -> {};
|
||||
public static final ClassType[] TYPE = ClassType.values();
|
||||
final ClassType keyType;
|
||||
final ClassType valueType;
|
||||
Set<String> blocked = new HashSet<>();
|
||||
Map<String, String> nameRemapper = new HashMap<>();
|
||||
Map<String, String> splitters = new HashMap<>();
|
||||
List<Predicate<String>> blockedFilters = new ArrayList<>();
|
||||
List<IMapper> mappers = new ArrayList<>();
|
||||
Set<String> flags = new LinkedHashSet<>();
|
||||
Set<String> globalFlags;
|
||||
Map<String, Integer> flaggedValues = new HashMap<>();
|
||||
BiConsumer<String, RequiredType> requirements = VOID;
|
||||
|
||||
public ModulePackage(Set<String> globalFlags, ClassType keyType, ClassType valueType) {
|
||||
this.globalFlags = globalFlags;
|
||||
this.keyType = keyType;
|
||||
this.valueType = valueType;
|
||||
}
|
||||
|
||||
public void finish() {
|
||||
requirements = VOID;
|
||||
mappers.sort(Comparator.comparing(IMapper::getSearchValue, Comparator.comparingInt(String::length).reversed()));
|
||||
mappers.sort(Comparator.comparing(IMapper::getSearchValue, this::sort));
|
||||
}
|
||||
|
||||
public void setRequirements(BiConsumer<String, RequiredType> requirements) {
|
||||
this.requirements = requirements;
|
||||
}
|
||||
|
||||
public boolean isSame() {
|
||||
return keyType == valueType;
|
||||
}
|
||||
|
||||
public boolean isEnumValid() {
|
||||
return keyType == ClassType.OBJECT;
|
||||
}
|
||||
|
||||
public ClassType getKeyType() {
|
||||
return keyType;
|
||||
}
|
||||
|
||||
public ClassType getValueType() {
|
||||
return valueType;
|
||||
}
|
||||
|
||||
public void addFlag(String flag) {
|
||||
flags.add(flag);
|
||||
}
|
||||
|
||||
public void addGlobalFlag(String flag) {
|
||||
globalFlags.add(flag);
|
||||
}
|
||||
|
||||
public void addValue(String key, int value) {
|
||||
flaggedValues.put(key, value);
|
||||
}
|
||||
|
||||
public void addRequirement(String fileName, RequiredType type) {
|
||||
requirements.accept(fileName, type);
|
||||
}
|
||||
|
||||
public void addMapper(IMapper mapper) {
|
||||
mappers.add(mapper);
|
||||
}
|
||||
|
||||
public void addBlockedFilter(Predicate<String> filter) {
|
||||
blockedFilters.add(filter);
|
||||
}
|
||||
|
||||
public void addBlockedFiles(String... names) {
|
||||
blocked.addAll(Arrays.asList(names));
|
||||
}
|
||||
|
||||
public void addSplitter(String fileName, String splitter) {
|
||||
splitters.put(fileName, splitter);
|
||||
}
|
||||
|
||||
public void addRemapper(String fileName, String actualName) {
|
||||
nameRemapper.put(fileName, actualName);
|
||||
}
|
||||
|
||||
public void process(String fileName, Consumer<TemplateProcess> result) {
|
||||
if(isBlocked(fileName)) return;
|
||||
String splitter = String.format(splitters.getOrDefault(fileName, keyType.getFileType()), keyType.getFileType(), valueType.getFileType());
|
||||
String newName = String.format(nameRemapper.getOrDefault(fileName, "%s"+fileName), splitter);
|
||||
TemplateProcess process = new TemplateProcess(newName+".java");
|
||||
process.setPathBuilder(new PathBuilder(keyType.getPathType()));
|
||||
process.addFlags(flags);
|
||||
process.addFlags(globalFlags);
|
||||
process.addMappers(mappers);
|
||||
process.addValues(flaggedValues);
|
||||
result.accept(process);
|
||||
}
|
||||
|
||||
private boolean isBlocked(String fileName) {
|
||||
if(blocked.contains(fileName)) return true;
|
||||
for(int i = 0,m=blockedFilters.size();i<m;i++) {
|
||||
if(blockedFilters.get(i).test(fileName)) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public static List<ModulePackage> createPackages(Set<String> globalFlags) {
|
||||
List<ModulePackage> list = new ArrayList<>();
|
||||
for(ClassType key : TYPE) {
|
||||
for(ClassType value : TYPE) {
|
||||
list.add(new ModulePackage(globalFlags, key, value));
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
private int sort(String key, String value) {
|
||||
if(value.contains(key)) return 1;
|
||||
else if(key.contains(value)) return -1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
class PathBuilder implements UnaryOperator<Path> {
|
||||
String before;
|
||||
|
||||
public PathBuilder(String before) {
|
||||
this.before = before;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Path apply(Path t) {
|
||||
return t.subpath(0, 6).resolve(before).resolve(t.subpath(6, t.getNameCount()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,166 +1,230 @@
|
||||
package speiger.src.builder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import speiger.src.builder.processor.TemplateProcess;
|
||||
import speiger.src.builder.processor.TemplateProcessor;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class PrimitiveCollectionsBuilder extends TemplateProcessor
|
||||
{
|
||||
Map<String, EnumSet<ClassType>> blocked = new HashMap<>();
|
||||
Map<String, String> nameRemapper = new HashMap<>();
|
||||
Map<String, String> biRequired = new HashMap<>();
|
||||
Set<String> enumRequired = new HashSet<>();
|
||||
public static final ClassType[] TYPE = ClassType.values();
|
||||
List<GlobalVariables> variables = new ArrayList<>();
|
||||
List<GlobalVariables> biVariables = new ArrayList<>();
|
||||
List<GlobalVariables> enumVariables = new ArrayList<>();
|
||||
|
||||
public PrimitiveCollectionsBuilder()
|
||||
{
|
||||
super(Paths.get("src/builder/resources/speiger/assets/collections/templates/"), Paths.get("src/main/java/speiger/src/collections/"), Paths.get("src/builder/resources/speiger/assets/collections/"));
|
||||
}
|
||||
|
||||
public PrimitiveCollectionsBuilder(Path sourceFolder, Path outputFolder, Path dataFolder)
|
||||
{
|
||||
super(sourceFolder, outputFolder, dataFolder);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isFileValid(Path fileName)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean relativePackages()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean debugUnusedMappers()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void init()
|
||||
{
|
||||
variables.clear();
|
||||
for(ClassType clzType : TYPE)
|
||||
{
|
||||
for(ClassType subType : TYPE)
|
||||
{
|
||||
create(clzType, subType);
|
||||
}
|
||||
}
|
||||
enumRequired.add("EnumMap");
|
||||
biRequired.put("BiConsumer", "");
|
||||
biRequired.put("UnaryOperator", "");
|
||||
addBiClass("Function", "Maps", "Map", "SortedMap", "NavigableMap", "AbstractMap", "OpenHashMap", "LinkedOpenHashMap", "OpenCustomHashMap", "LinkedOpenCustomHashMap", "ArrayMap", "RBTreeMap", "AVLTreeMap");
|
||||
nameRemapper.put("BiConsumer", "%sConsumer");
|
||||
nameRemapper.put("IArray", "I%sArray");
|
||||
nameRemapper.put("AbstractMap", "Abstract%sMap");
|
||||
nameRemapper.put("AbstractCollection", "Abstract%sCollection");
|
||||
nameRemapper.put("AbstractSet", "Abstract%sSet");
|
||||
nameRemapper.put("AbstractList", "Abstract%sList");
|
||||
nameRemapper.put("EnumMap", "Enum2%sMap");
|
||||
addBlockage(ClassType.OBJECT, "Consumer", "Comparator", "Stack");
|
||||
addBlockage(ClassType.BOOLEAN, "ArraySet", "AVLTreeSet", "RBTreeSet", "SortedSet", "NavigableSet", "OpenHashSet", "OpenCustomHashSet", "LinkedOpenHashSet", "LinkedOpenCustomHashSet");
|
||||
addBlockage(ClassType.BOOLEAN, "SortedMap", "NavigableMap", "OpenHashMap", "LinkedOpenHashMap", "OpenCustomHashMap", "LinkedOpenCustomHashMap", "ArrayMap", "RBTreeMap", "AVLTreeMap");
|
||||
}
|
||||
|
||||
protected void create(ClassType mainType, ClassType subType)
|
||||
{
|
||||
GlobalVariables type = new GlobalVariables(mainType, subType);
|
||||
type.createFlags();
|
||||
type.createHelperVariables();
|
||||
type.createVariables();
|
||||
type.createPreFunctions();
|
||||
type.createClassTypes();
|
||||
type.createFunctions();
|
||||
if(mainType == subType) variables.add(type);
|
||||
biVariables.add(type);
|
||||
if(mainType.isObject()) enumVariables.add(type);
|
||||
}
|
||||
|
||||
protected void addBiClass(String...classNames)
|
||||
{
|
||||
for(String s : classNames)
|
||||
{
|
||||
biRequired.put(s, "2");
|
||||
}
|
||||
}
|
||||
|
||||
protected void addBlockage(ClassType type, String...args)
|
||||
{
|
||||
for(String s : args)
|
||||
{
|
||||
EnumSet<ClassType> set = blocked.get(s);
|
||||
if(set == null)
|
||||
{
|
||||
set = EnumSet.noneOf(ClassType.class);
|
||||
blocked.put(s, set);
|
||||
}
|
||||
set.add(type);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createProcesses(String name, Consumer<TemplateProcess> acceptor)
|
||||
{
|
||||
String splitter = biRequired.get(name);
|
||||
boolean valueRequired = enumRequired.contains(name);
|
||||
List<GlobalVariables> vars = getVariablesByClass(name, splitter != null);
|
||||
EnumSet<ClassType> types = blocked.get(name);
|
||||
for(int i = 0,m=vars.size();i<m;i++)
|
||||
{
|
||||
GlobalVariables type = vars.get(i);
|
||||
if(types == null || !types.contains(type.getType()))
|
||||
{
|
||||
acceptor.accept(type.create(nameRemapper.getOrDefault(name, "%s"+name), splitter, valueRequired));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected List<GlobalVariables> getVariablesByClass(String name, boolean bi) {
|
||||
if(enumRequired.contains(name)) return enumVariables;
|
||||
if(bi) return biVariables;
|
||||
return variables;
|
||||
}
|
||||
|
||||
public static void main(String...args)
|
||||
{
|
||||
try
|
||||
{
|
||||
if(args.length == 0) {
|
||||
new PrimitiveCollectionsBuilder().process(false);
|
||||
} else if(args.length == 3) {
|
||||
new PrimitiveCollectionsBuilder(Paths.get(args[0]), Paths.get(args[1]), Paths.get(args[2])).process(false);
|
||||
} else {
|
||||
System.out.println("Invalid argument count passed in");
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
catch(InterruptedException e)
|
||||
{
|
||||
e.printStackTrace();
|
||||
}
|
||||
catch(IOException e)
|
||||
{
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
package speiger.src.builder;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.StringJoiner;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import speiger.src.builder.modules.AsyncModule;
|
||||
import speiger.src.builder.modules.BaseModule;
|
||||
import speiger.src.builder.modules.CollectionModule;
|
||||
import speiger.src.builder.modules.FunctionModule;
|
||||
import speiger.src.builder.modules.JavaModule;
|
||||
import speiger.src.builder.modules.ListModule;
|
||||
import speiger.src.builder.modules.MapModule;
|
||||
import speiger.src.builder.modules.PairModule;
|
||||
import speiger.src.builder.modules.PrioQueueModule;
|
||||
import speiger.src.builder.modules.SetModule;
|
||||
import speiger.src.builder.processor.TemplateProcess;
|
||||
import speiger.src.builder.processor.TemplateProcessor;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class PrimitiveCollectionsBuilder extends TemplateProcessor
|
||||
{
|
||||
private static final int SPECIAL = 0x1; //Detects if the Builder is generating tests
|
||||
private static final int LOAD = 0x2; //If Configs should be loaded
|
||||
private static final int ANTI_SAVE = SPECIAL | LOAD; //If save should be disabled since load/save shouldn't happen at the same time.
|
||||
private static final int SAVE = 0x4; //if the configuration should be created
|
||||
Set<String> globalFlags = new HashSet<>();
|
||||
List<ModulePackage> simplePackages = new ArrayList<>();
|
||||
List<ModulePackage> biPackages = new ArrayList<>();
|
||||
List<ModulePackage> enumPackages = new ArrayList<>();
|
||||
Map<String, RequiredType> requirements = new HashMap<>();
|
||||
SettingsManager manager = new SettingsManager();
|
||||
int flags;
|
||||
|
||||
public PrimitiveCollectionsBuilder() {
|
||||
this(false);
|
||||
}
|
||||
|
||||
public PrimitiveCollectionsBuilder(boolean silencedSuccess) {
|
||||
super(silencedSuccess, Paths.get("src/builder/resources/speiger/assets/collections/templates/"), Paths.get("src/main/java/speiger/src/collections/"), Paths.get("src/builder/resources/speiger/assets/collections/"));
|
||||
}
|
||||
|
||||
public PrimitiveCollectionsBuilder(Path sourceFolder, Path outputFolder, Path dataFolder) {
|
||||
this(false, sourceFolder, outputFolder, dataFolder);
|
||||
}
|
||||
|
||||
public PrimitiveCollectionsBuilder(boolean silencedSuccess, Path sourceFolder, Path outputFolder, Path dataFolder) {
|
||||
super(silencedSuccess, sourceFolder, outputFolder, dataFolder);
|
||||
}
|
||||
|
||||
private PrimitiveCollectionsBuilder setFlags(int flags) {
|
||||
this.flags = flags;
|
||||
if((flags & ANTI_SAVE) != 0) {
|
||||
this.flags &= ~SAVE;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
private static PrimitiveCollectionsBuilder createTests(boolean silent, int flags) {
|
||||
return new PrimitiveCollectionsBuilder(silent,
|
||||
Paths.get("src/builder/resources/speiger/assets/tests/templates/"),
|
||||
Paths.get("src/test/java/speiger/src/tests/"),
|
||||
Paths.get("src/builder/resources/speiger/assets/tests/")).setFlags(flags | SPECIAL);
|
||||
}
|
||||
|
||||
private static PrimitiveCollectionsBuilder createTesters(boolean silent, int flags) {
|
||||
return new PrimitiveCollectionsBuilder(silent,
|
||||
Paths.get("src/builder/resources/speiger/assets/testers/templates/"),
|
||||
Paths.get("src/test/java/speiger/src/testers/"),
|
||||
Paths.get("src/builder/resources/speiger/assets/testers/")).setFlags(flags | SPECIAL);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isFileValid(Path fileName) { return true; }
|
||||
@Override
|
||||
protected boolean relativePackages() { return true; }
|
||||
@Override
|
||||
protected boolean debugUnusedMappers() { return false; }
|
||||
|
||||
@Override
|
||||
protected void afterFinish() {
|
||||
if((flags & SPECIAL) == 0 && getVersion() > 8) {
|
||||
Path basePath = Paths.get("src/main/java");
|
||||
try(BufferedWriter writer = Files.newBufferedWriter(basePath.resolve("module-info.java"))) {
|
||||
writer.write(getModuleInfo(basePath));
|
||||
}
|
||||
catch(Exception e) { e.printStackTrace(); }
|
||||
}
|
||||
}
|
||||
|
||||
public List<BaseModule> createModules() {
|
||||
List<BaseModule> modules = new ArrayList<>();
|
||||
modules.add(JavaModule.INSTANCE);
|
||||
modules.add(FunctionModule.INSTANCE);
|
||||
modules.add(CollectionModule.INSTANCE);
|
||||
modules.add(PrioQueueModule.INSTANCE);
|
||||
modules.add(ListModule.INSTANCE);
|
||||
modules.add(SetModule.INSTANCE);
|
||||
modules.add(MapModule.INSTANCE);
|
||||
modules.add(PairModule.INSTANCE);
|
||||
modules.add(AsyncModule.INSTANCE);
|
||||
return modules;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected void init() {
|
||||
prepPackages();
|
||||
//Init Modules here
|
||||
addModules(createModules());
|
||||
finishPackages();
|
||||
}
|
||||
|
||||
public void addModules(List<BaseModule> modules) {
|
||||
for(int i = 0,m=modules.size();i<m;i++) {
|
||||
modules.get(i).setManager(manager);
|
||||
}
|
||||
manager.resolve();
|
||||
for(int i = 0,m=modules.size();i<m;i++) {
|
||||
biPackages.forEach(modules.get(i)::init);
|
||||
}
|
||||
modules.forEach(BaseModule::cleanup);
|
||||
}
|
||||
|
||||
private void finishPackages() {
|
||||
biPackages.forEach(ModulePackage::finish);
|
||||
if((flags & SAVE) != 0) manager.save();
|
||||
}
|
||||
|
||||
private void prepPackages() {
|
||||
if((flags & LOAD) != 0) manager.load();
|
||||
for(ModulePackage entry : ModulePackage.createPackages(globalFlags)) {
|
||||
entry.setRequirements(requirements::put);
|
||||
biPackages.add(entry);
|
||||
if(entry.isSame()) simplePackages.add(entry);
|
||||
if(entry.isEnumValid()) enumPackages.add(entry);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createProcesses(String fileName, Consumer<TemplateProcess> process) {
|
||||
List<ModulePackage> packages = getPackagesByRequirement(requirements.get(fileName));
|
||||
for(int i = 0,m=packages.size();i<m;i++) {
|
||||
packages.get(i).process(fileName, process);
|
||||
}
|
||||
}
|
||||
|
||||
protected List<ModulePackage> getPackagesByRequirement(RequiredType type) {
|
||||
if(type == null) return simplePackages;
|
||||
if(type == RequiredType.BI_CLASS) return biPackages;
|
||||
if(type == RequiredType.ENUM) return enumPackages;
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
private String getModuleInfo(Path basePath) {
|
||||
StringJoiner joiner = new StringJoiner("\n", "", "\n");
|
||||
try(Stream<Path> stream = Files.walk(getOutputFolder())) {
|
||||
stream.filter(Files::isDirectory)
|
||||
.filter(this::containsFiles)
|
||||
.map(basePath::relativize)
|
||||
.map(Path::toString)
|
||||
.map(this::sanitize)
|
||||
.forEach(T -> joiner.add("\texports "+T+";"));
|
||||
}
|
||||
catch(Exception e) {
|
||||
e.printStackTrace();
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append("/** @author Speiger */\n");
|
||||
builder.append("module ").append(sanitize(basePath.relativize(getOutputFolder()).toString())).append(" {\n");
|
||||
builder.append(joiner.toString()).append("}");
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
private String sanitize(String input) {
|
||||
return input.replace("\\", ".").replace("/", ".");
|
||||
}
|
||||
|
||||
private boolean containsFiles(Path path) {
|
||||
try(Stream<Path> stream = Files.walk(path, 1)) {
|
||||
return stream.filter(Files::isRegularFile).findFirst().isPresent();
|
||||
}
|
||||
catch(Exception e) { e.printStackTrace(); }
|
||||
return false;
|
||||
}
|
||||
|
||||
private int getVersion() {
|
||||
String version = System.getProperty("java.version");
|
||||
if(version.startsWith("1.")) return Integer.parseInt(version.substring(2, 3));
|
||||
int dot = version.indexOf(".");
|
||||
return Integer.parseInt(dot != -1 ? version.substring(0, dot) : version);
|
||||
}
|
||||
|
||||
public static void main(String...args) {
|
||||
try
|
||||
{
|
||||
Set<String> flags = new HashSet<>(Arrays.asList(args));
|
||||
boolean silent = flags.contains("silent");
|
||||
boolean force = flags.contains("force");
|
||||
boolean tests = flags.contains("tests");
|
||||
boolean forceTests = flags.contains("force-tests");
|
||||
boolean load = flags.contains("load");
|
||||
boolean save = flags.contains("save");
|
||||
int flag = (load ? LOAD : 0) | (save ? SAVE : 0);
|
||||
new PrimitiveCollectionsBuilder(silent).setFlags(flag).process(force);
|
||||
if(tests) {
|
||||
createTests(silent, flag).process(force || forceTests);
|
||||
createTesters(silent, flag).process(force || forceTests);
|
||||
}
|
||||
}
|
||||
catch(InterruptedException | IOException e)
|
||||
{
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
8
src/builder/java/speiger/src/builder/RequiredType.java
Normal file
8
src/builder/java/speiger/src/builder/RequiredType.java
Normal file
@ -0,0 +1,8 @@
|
||||
package speiger.src.builder;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public enum RequiredType
|
||||
{
|
||||
BI_CLASS,
|
||||
ENUM
|
||||
}
|
||||
217
src/builder/java/speiger/src/builder/SettingsManager.java
Normal file
217
src/builder/java/speiger/src/builder/SettingsManager.java
Normal file
@ -0,0 +1,217 @@
|
||||
package speiger.src.builder;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
import java.util.TreeSet;
|
||||
|
||||
import com.google.gson.JsonElement;
|
||||
import com.google.gson.JsonObject;
|
||||
import com.google.gson.JsonParser;
|
||||
import com.google.gson.internal.Streams;
|
||||
import com.google.gson.stream.JsonWriter;
|
||||
|
||||
import speiger.src.builder.dependencies.IDependency;
|
||||
import speiger.src.builder.dependencies.IDependency.LoadingState;
|
||||
import speiger.src.builder.modules.BaseModule;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class SettingsManager
|
||||
{
|
||||
boolean loaded;
|
||||
Map<String, LoadingState> parsedData = new TreeMap<>();
|
||||
JsonObject data = new JsonObject();
|
||||
Set<String> moduleNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
|
||||
Set<IDependency> allDependencies = new LinkedHashSet<>();
|
||||
|
||||
public void resolve() {
|
||||
if(!loaded) return;
|
||||
Set<IDependency> roots = new LinkedHashSet<>();
|
||||
Set<IDependency> leafs = new LinkedHashSet<>();
|
||||
for(IDependency entry : allDependencies) {
|
||||
if(entry.isRoot()) {
|
||||
roots.add(entry);
|
||||
}
|
||||
if(entry.isLeaf()) {
|
||||
leafs.add(entry);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* This has to be 2 iteration passes.
|
||||
* Due to Key Value Pairs, first pass does all initials keys, and the second pass processes the values.
|
||||
* May require more passes but extremely unlikely
|
||||
*/
|
||||
for(int i = 0;i<2;i++) {
|
||||
for(ClassType keyType : ModulePackage.TYPE) {
|
||||
for(ClassType valueType : ModulePackage.TYPE) {
|
||||
for(IDependency entry : roots) {
|
||||
entry.resolveRequirements(keyType, valueType);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
List<String> errors = new ArrayList<>();
|
||||
for(ClassType keyType : ModulePackage.TYPE) {
|
||||
for(ClassType valueType : ModulePackage.TYPE) {
|
||||
for(IDependency entry : leafs) {
|
||||
entry.validateDependency(errors::add, keyType, valueType);
|
||||
}
|
||||
}
|
||||
}
|
||||
if(errors.size() > 0) {
|
||||
throw new IllegalStateException("Issues with dependencies found: "+String.join("\n", errors));
|
||||
}
|
||||
}
|
||||
|
||||
public void addModule(BaseModule module) {
|
||||
if(loaded) {
|
||||
if(module.isBiModule()) {
|
||||
for(ClassType keyType : ModulePackage.TYPE) {
|
||||
for(ClassType valueType : ModulePackage.TYPE) {
|
||||
if(!module.isModuleValid(keyType, valueType)) continue;
|
||||
for(IDependency dependency : module.getDependencies(keyType, valueType)) {
|
||||
dependency.set(parsedData);
|
||||
allDependencies.add(dependency);
|
||||
}
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
for(ClassType keyType : ModulePackage.TYPE) {
|
||||
if(!module.isModuleValid(keyType, keyType)) continue;
|
||||
for(IDependency dependency : module.getDependencies(keyType, keyType)) {
|
||||
dependency.set(parsedData);
|
||||
allDependencies.add(dependency);
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
String moduleName = module.getModuleName();
|
||||
moduleNames.add(moduleName);
|
||||
data.addProperty(moduleName, true);
|
||||
if(module.isBiModule()) {
|
||||
for(ClassType keyType : ModulePackage.TYPE) {
|
||||
for(ClassType valueType : ModulePackage.TYPE) {
|
||||
if(!module.isModuleValid(keyType, valueType)) continue;
|
||||
JsonObject obj = new JsonObject();
|
||||
for(IDependency dependency : module.getDependencies(keyType, valueType)) {
|
||||
String key = dependency.getName();
|
||||
if(key != null) obj.addProperty(key, true);
|
||||
}
|
||||
addModule(keyType, valueType, true, moduleName, obj);
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
for(ClassType keyType : ModulePackage.TYPE) {
|
||||
if(!module.isModuleValid(keyType, keyType)) continue;
|
||||
JsonObject obj = new JsonObject();
|
||||
for(IDependency dependency : module.getDependencies(keyType, keyType)) {
|
||||
String key = dependency.getName();
|
||||
if(key != null) obj.addProperty(key, true);
|
||||
}
|
||||
addModule(keyType, keyType, false, moduleName, obj);
|
||||
}
|
||||
}
|
||||
|
||||
public void printModuleSettings(List<BaseModule> modules) {
|
||||
JsonObject data = new JsonObject();
|
||||
for(BaseModule module : modules) {
|
||||
String moduleName = module.getModuleName();
|
||||
if(module.isBiModule()) {
|
||||
for(ClassType keyType : ModulePackage.TYPE) {
|
||||
for(ClassType valueType : ModulePackage.TYPE) {
|
||||
if(!module.isModuleValid(keyType, valueType)) continue;
|
||||
JsonObject obj = new JsonObject();
|
||||
for(IDependency dependency : module.getDependencies(keyType, valueType)) {
|
||||
String key = dependency.getName();
|
||||
if(key != null) obj.addProperty(key, dependency.isLoaded(keyType, valueType).getJsonResult());
|
||||
}
|
||||
addModule(data, keyType, valueType, true, moduleName, obj);
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for(ClassType keyType : ModulePackage.TYPE) {
|
||||
if(!module.isModuleValid(keyType, keyType)) continue;
|
||||
JsonObject obj = new JsonObject();
|
||||
for(IDependency dependency : module.getDependencies(keyType, keyType)) {
|
||||
String key = dependency.getName();
|
||||
if(key != null) obj.addProperty(key, dependency.isLoaded(keyType, keyType).getJsonResult());
|
||||
}
|
||||
addModule(data, keyType, keyType, false, moduleName, obj);
|
||||
}
|
||||
}
|
||||
try {
|
||||
System.out.println();
|
||||
JsonWriter writer = new JsonWriter(new OutputStreamWriter(System.out));
|
||||
writer.setIndent("\t");
|
||||
Streams.write(data, writer);
|
||||
writer.flush();
|
||||
System.out.println();
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
public void load() {
|
||||
try(BufferedReader reader = Files.newBufferedReader(Paths.get("ModulSettings.json"))) {
|
||||
data = JsonParser.parseReader(reader).getAsJsonObject();
|
||||
loaded = true;
|
||||
IDependency.flatten("", false, data, parsedData);
|
||||
JsonElement element = data.get("Default");
|
||||
LoadingState.setOptionalResolver(LoadingState.of(element == null ? true : element.getAsBoolean()));
|
||||
}
|
||||
catch(Exception e) { e.printStackTrace(); }
|
||||
}
|
||||
|
||||
public void save() {
|
||||
data.asMap().keySet().removeAll(moduleNames);
|
||||
JsonObject result = new JsonObject();
|
||||
for(String s : moduleNames) {
|
||||
result.addProperty(s, true);
|
||||
}
|
||||
result.asMap().putAll(data.asMap());
|
||||
|
||||
try(JsonWriter writer = new JsonWriter(Files.newBufferedWriter(Paths.get("ModulSettings.json")))) {
|
||||
writer.setIndent("\t");
|
||||
Streams.write(result, writer);
|
||||
}
|
||||
catch(Exception e) { e.printStackTrace(); }
|
||||
}
|
||||
|
||||
private void addModule(JsonObject data, ClassType keyType, ClassType valueType, boolean bi, String moduleName, JsonObject obj) {
|
||||
JsonObject result = getObject(data, keyType.getClassPath(), true);
|
||||
if(bi) {
|
||||
result = getObject(result, valueType.getClassPath(), true);
|
||||
}
|
||||
result.add(moduleName, obj);
|
||||
}
|
||||
|
||||
private void addModule(ClassType keyType, ClassType valueType, boolean bi, String moduleName, JsonObject obj) {
|
||||
JsonObject result = getObject(data, keyType.getClassPath(), true);
|
||||
if(bi) {
|
||||
result = getObject(result, valueType.getClassPath(), true);
|
||||
}
|
||||
result.add(moduleName, obj);
|
||||
}
|
||||
|
||||
private JsonObject getObject(JsonObject data, String name, boolean create) {
|
||||
JsonObject obj = data.getAsJsonObject(name);
|
||||
if(obj == null) {
|
||||
obj = new JsonObject();
|
||||
data.add(name, obj);
|
||||
if(create) obj.addProperty("Enabled", true);
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,137 @@
|
||||
package speiger.src.builder.dependencies;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.StringJoiner;
|
||||
import java.util.TreeSet;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.dependencies.Requirements.Requirement;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public abstract class BaseDependency implements IDependency {
|
||||
protected static boolean FETCH_FAILURES = false;
|
||||
protected static Set<String> FAILURE_KEYS = new TreeSet<>();
|
||||
|
||||
protected final String name;
|
||||
protected final boolean biType;
|
||||
protected Map<String, LoadingState> dependencies;
|
||||
protected List<IDependency> children = new ArrayList<>();
|
||||
protected List<Requirement> requirements = new ArrayList<>();
|
||||
protected ClassType keyType;
|
||||
protected ClassType valueType;
|
||||
|
||||
public BaseDependency(String name, boolean biType) {
|
||||
this.name = name;
|
||||
this.biType = biType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(Map<String, LoadingState> dependency) {
|
||||
dependencies = dependency;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IDependency addDependency(Requirement require) {
|
||||
requirements.add(require);
|
||||
require.dependency.addChild(this);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addChild(IDependency child) {
|
||||
children.add(child);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeaf() {
|
||||
return children.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isRoot() {
|
||||
return requirements.isEmpty();
|
||||
}
|
||||
|
||||
protected LoadingState getGlobalState() {
|
||||
return dependencies.getOrDefault(name, LoadingState.OPTIONAL);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLocalStateKey(ClassType keyType, ClassType valueType) {
|
||||
return (biType ? keyType.getClassPath()+"-"+valueType.getClassPath() : keyType.getClassPath())+"-"+name;
|
||||
}
|
||||
|
||||
protected LoadingState getLocalState(ClassType keyType, ClassType valueType) {
|
||||
return dependencies.getOrDefault(getLocalStateKey(keyType, valueType), LoadingState.OPTIONAL);
|
||||
}
|
||||
|
||||
protected LoadingState getReqirementState(ClassType keyType, ClassType valueType) {
|
||||
LoadingState state = requirements.isEmpty() ? LoadingState.REQUIRED : LoadingState.OPTIONAL;
|
||||
for(int i = 0,m=requirements.size();i<m;i++) {
|
||||
state = state.merge(requirements.get(i).test(keyType, valueType));
|
||||
}
|
||||
return state.resolveIfUndefined();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void resolveRequirements(ClassType keyType, ClassType valueType) {
|
||||
if(!children.isEmpty()) {
|
||||
for(IDependency child : children) {
|
||||
if(child == this) continue;
|
||||
child.resolveRequirements(keyType, valueType);
|
||||
}
|
||||
}
|
||||
if(getLocalState(keyType, valueType) == LoadingState.REQUIRED) {
|
||||
for(Requirement req : requirements) {
|
||||
dependencies.putIfAbsent(req.key(keyType, valueType), LoadingState.REQUIRED);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Validates that a REQUIRED dependency has no requirement that was explicitly
 * disabled, reporting each conflict through the supplied consumer.
 * Uses the shared FETCH_FAILURES flag as a collection protocol: while it is
 * set, isLoaded() implementations record every REJECTED dependency key into
 * the shared FAILURE_KEYS set. Not thread-safe — relies on single-threaded use.
 */
@Override
public void validateDependency(Consumer<String> result, ClassType keyType, ClassType valueType) {
	if(getLocalState(keyType, valueType) == LoadingState.REQUIRED) {
		// Arm failure collection, probe every requirement, then disarm.
		FETCH_FAILURES = true;
		for(Requirement req : requirements) {
			req.test(keyType, valueType);
		}
		FETCH_FAILURES = false;
		if(FAILURE_KEYS.size() > 0) {
			int size = FAILURE_KEYS.size();
			StringJoiner joiner = new StringJoiner("], [", "[", "]");
			FAILURE_KEYS.forEach(joiner::add);
			FAILURE_KEYS.clear();
			// NOTE(review): with more than one failure the already-bracketed
			// joiner output gets a second pair of brackets ("[[a], [b]]") —
			// presumably intentional grouping; confirm against expected log format.
			String joins = size > 1 ? "["+joiner.toString()+"]" : joiner.toString();
			
			result.accept("["+getLocalStateKey(keyType, valueType)+"] Requires "+joins+" but it specifically has been disabled!");
		}
		
	}
}
|
||||
|
||||
/**
 * Caches the key/value type pair currently being processed; consumed later
 * by {@link #isEnabled()}.
 */
@Override
public void set(ClassType key, ClassType value) {
	this.keyType = key;
	this.valueType = value;
}
|
||||
|
||||
@Override
|
||||
public boolean isEnabled() {
|
||||
if(keyType == null || keyType == null) return false;
|
||||
return isLoaded(keyType, valueType).getJsonResult();
|
||||
}
|
||||
|
||||
/** @return the plain (untyped) name of this dependency. */
@Override
public String getName() {
	return name;
}
|
||||
}
|
||||
@ -0,0 +1,29 @@
|
||||
package speiger.src.builder.dependencies;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class FunctionDependency extends BaseDependency {
|
||||
ModuleDependency owner;
|
||||
|
||||
public FunctionDependency(ModuleDependency owner, String name) {
|
||||
super(name, owner.biType);
|
||||
this.owner = owner;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LoadingState isLoaded(ClassType key, ClassType value) {
|
||||
if(dependencies == null) return LoadingState.REQUIRED;
|
||||
LoadingState result = getLocalState(key, value);
|
||||
if(FETCH_FAILURES && result == LoadingState.REJECTED) {
|
||||
FAILURE_KEYS.add(getLocalStateKey(key, value));
|
||||
}
|
||||
return result.resolveIfUndefined().merge(getReqirementState(key, value));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLocalStateKey(ClassType keyType, ClassType valueType) {
|
||||
return (biType ? keyType.getClassPath()+"-"+valueType.getClassPath() : keyType.getClassPath())+"-"+owner.getName()+"-"+name;
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,104 @@
|
||||
package speiger.src.builder.dependencies;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import com.google.gson.JsonElement;
|
||||
import com.google.gson.JsonObject;
|
||||
import com.google.gson.JsonPrimitive;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.dependencies.Requirements.Requirement;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
/**
 * Contract for the module/feature dependency graph of the code generator.
 * Implementations decide, per key/value type pair, whether a piece of
 * generated code is enabled, and can validate conflicting configurations.
 */
@SuppressWarnings("javadoc")
public interface IDependency {
	
	/** Injects the flattened settings map shared by the dependency graph. */
	public void set(Map<String, LoadingState> dependency);
	/** Stores the key/value type pair currently being processed. */
	public void set(ClassType key, ClassType value);
	/** Resolves the loading state for the given type pair. */
	public LoadingState isLoaded(ClassType key, ClassType value);
	/** Builds the settings key this dependency is configured under for the given types. */
	public String getLocalStateKey(ClassType keyType, ClassType valueType);
	/** Convenience resolution against the types previously stored via set(). */
	public boolean isEnabled();
	public boolean isLeaf();
	public boolean isRoot();
	
	public String getName();
	/** Reports configuration conflicts (required but explicitly disabled) to the consumer. */
	public void validateDependency(Consumer<String> result, ClassType keyType, ClassType valueType);
	/** Propagates REQUIRED states down to this node's requirements. */
	public void resolveRequirements(ClassType keyType, ClassType valueType);
	
	public void addChild(IDependency child);
	/** Registers a requirement; returns this, cast for fluent chaining. */
	public <T extends IDependency> T addDependency(Requirement require);
	// Fluent helpers binding the predefined test/key strategies from Requirements.
	public default <T extends IDependency> T addKeyDependency(IDependency dependency) { return addDependency(new Requirement(dependency, Requirements.KEY_TEST, Requirements.KEY_GETTER)); }
	public default <T extends IDependency> T addValueDependency(IDependency dependency) { return addDependency(new Requirement(dependency, Requirements.VALUE_TEST, Requirements.VALUE_GETTER)); }
	public default <T extends IDependency> T addEntryDependency(IDependency dependency) { return addDependency(new Requirement(dependency, Requirements.ENTRY_TEST, Requirements.ENTRY_GETTER)); }
	public default <T extends IDependency> T addTypeDependency(IDependency dependency, ClassType type) { return addDependency(new Requirement(dependency, Requirements.typedTest(type), Requirements.typedKey(type))); }
	public default <T extends IDependency> T addOptionalTypeDependency(IDependency dependency, ClassType type, boolean key) { return addDependency(new Requirement(dependency, Requirements.optionalTest(type, key), Requirements.optionalKey(type, key))); }
	public default <T extends IDependency> T addOptionalTypeDependency(ClassType type, boolean key) { return addDependency(new Requirement(this, Requirements.optionalTest(type, key), Requirements.optionalKey(type, key))); }
	
	
	/**
	 * Flattens a nested json settings object into "a-b-c" keyed loading states.
	 * A boolean named "Enabled" maps to the parent prefix itself instead of a
	 * child key.
	 * NOTE(review): the "Enabled" branch chops the trailing character of the
	 * prefix, assuming a non-empty prefix ending in "-"; a top-level call with
	 * applyMiddle=false would chop a real character (or throw on "") — confirm
	 * callers always pass a named prefix.
	 */
	public static void flatten(String prefix, boolean applyMiddle, JsonObject object, Map<String, LoadingState> result) {
		if(applyMiddle) prefix+="-";
		for(Entry<String, JsonElement> entry : object.entrySet()) {
			String key = entry.getKey();
			JsonElement value = entry.getValue();
			if(value instanceof JsonPrimitive) {
				String entryKey = prefix+key;
				if("Enabled".equalsIgnoreCase(key)) {
					entryKey = prefix.substring(0, prefix.length()-1);
				}
				result.put(entryKey, LoadingState.of(((JsonPrimitive)value).getAsBoolean()));
			}
			if(value instanceof JsonObject) {
				flatten(prefix+key, true, (JsonObject)value, result);
			}
		}
	}
	
	/**
	 * Tri-state loading flag. Declaration order matters: merge() picks the
	 * higher ordinal, so REJECTED beats REQUIRED beats OPTIONAL.
	 */
	public static enum LoadingState {
		OPTIONAL,
		REQUIRED,
		REJECTED;
		
		// What OPTIONAL resolves to when forced to a decision; configurable globally.
		private static LoadingState RESOLVED = LoadingState.REQUIRED;
		
		public static LoadingState of(boolean value) {
			return value ? REQUIRED : REJECTED;
		}
		
		/** Higher ordinal wins: REJECTED > REQUIRED > OPTIONAL. */
		public LoadingState merge(LoadingState merge) {
			return ordinal() > merge.ordinal() ? this : merge;
		}
		
		/** Substitutes the given state only when this one is still OPTIONAL. */
		public LoadingState replaceIfUndefined(LoadingState state) {
			return this == OPTIONAL ? state : this;
		}
		
		/** Forces OPTIONAL to the globally configured resolver state. */
		public LoadingState resolveIfUndefined() {
			return this == OPTIONAL ? RESOLVED : this;
		}
		
		/** Like merge, but an incoming REJECTED never overrides this state. */
		public LoadingState mergeDown(LoadingState merge) {
			if(merge == REJECTED || ordinal() > merge.ordinal()) {
				return this;
			}
			return merge;
		}
		
		/** Like merge, but an incoming REQUIRED never overrides this state. */
		public LoadingState mergeUp(LoadingState merge) {
			if(merge == REQUIRED || ordinal() > merge.ordinal()) {
				return this;
			}
			return merge;
		}
		
		public static void setOptionalResolver(LoadingState state) {
			RESOLVED = state;
		}
		
		/** Final boolean answer: resolves OPTIONAL, then tests for REQUIRED. */
		public boolean getJsonResult() {
			LoadingState state = this == OPTIONAL ? RESOLVED : this;
			return state == REQUIRED;
		}
	}
}
|
||||
@ -0,0 +1,31 @@
|
||||
package speiger.src.builder.dependencies;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.modules.BaseModule;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class ModuleDependency extends BaseDependency {
|
||||
BaseModule owner;
|
||||
|
||||
public ModuleDependency(BaseModule owner, boolean biType) {
|
||||
super(owner.getModuleName(), biType);
|
||||
this.owner = owner;
|
||||
}
|
||||
|
||||
public FunctionDependency createDependency(String name) {
|
||||
FunctionDependency result = new FunctionDependency(this, name);
|
||||
if(biType) result.addEntryDependency(this);
|
||||
else result.addKeyDependency(this);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LoadingState isLoaded(ClassType key, ClassType value) {
|
||||
if(dependencies == null) return LoadingState.REQUIRED;
|
||||
LoadingState result = getLocalState(key, value);
|
||||
if(FETCH_FAILURES && result == LoadingState.REJECTED) {
|
||||
FAILURE_KEYS.add(getLocalStateKey(key, value));
|
||||
}
|
||||
return result.replaceIfUndefined(getGlobalState()).resolveIfUndefined().merge(getReqirementState(key, value));
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,68 @@
|
||||
package speiger.src.builder.dependencies;
|
||||
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.dependencies.IDependency.LoadingState;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
/**
 * Strategy holder for dependency requirements: how to probe a dependency's
 * state (RequirementTest) and how to derive its settings key (RequirementKey)
 * for a given key/value type pair.
 */
@SuppressWarnings("javadoc")
public class Requirements {
	// Probe strategies: key-only, value-only, or full entry type pair.
	public static final RequirementTest KEY_TEST = (T, K, V) -> T.isLoaded(K, K);
	public static final RequirementTest VALUE_TEST = (T, K, V) -> T.isLoaded(V, V);
	public static final RequirementTest ENTRY_TEST = (T, K, V) -> T.isLoaded(K, V);
	
	/** Probe that always uses a fixed type, ignoring the current pair. */
	public static RequirementTest typedTest(ClassType type) {
		return (T, K, V) -> T.isLoaded(type, type);
	}
	
	/**
	 * Probe that only applies when the selected side (key or value) is NOT the
	 * given type; otherwise the requirement is trivially satisfied.
	 */
	public static RequirementTest optionalTest(ClassType type, boolean key) {
		return (T, K, V) -> (key ? K : V) != type ? T.isLoaded(type, type) : LoadingState.REQUIRED;
	}
	
	// Key strategies mirroring the probes above.
	public static final RequirementKey KEY_GETTER = (T, K, V) -> T.getLocalStateKey(K, K);
	public static final RequirementKey VALUE_GETTER = (T, K, V) -> T.getLocalStateKey(V, V);
	public static final RequirementKey ENTRY_GETTER = (T, K, V) -> T.getLocalStateKey(K, V);
	
	public static RequirementKey typedKey(ClassType type) {
		return (T, K, V) -> T.getLocalStateKey(type, type);
	}
	
	/** Key for optionalTest; empty string when the requirement does not apply. */
	public static RequirementKey optionalKey(ClassType type, boolean key) {
		return (T, K, V) -> (key ? K : V) != type ? T.getLocalStateKey(type, type) : "";
	}
	
	
	
	/** Functional: resolves the loading state of a dependency for a type pair. */
	public interface RequirementTest {
		public LoadingState test(IDependency test, ClassType keyType, ClassType valueType);
	}
	
	/** Functional: resolves the settings key of a dependency for a type pair. */
	public static interface RequirementKey {
		public String key(IDependency test, ClassType keyType, ClassType valueType);
	}
	
	/** Functional: pushes validation results for a dependency into a consumer. */
	public static interface RequirementResolver {
		public void resolve(IDependency test, Consumer<String> result, ClassType keyType, ClassType valueType);
	}
	
	/** A dependency bundled with its probe and key strategies. */
	public static class Requirement {
		IDependency dependency;
		RequirementTest test;
		RequirementKey key;
		
		public Requirement(IDependency dependency, RequirementTest test, RequirementKey key) {
			this.dependency = dependency;
			this.test = test;
			this.key = key;
		}
		
		public LoadingState test(ClassType keyType, ClassType valueType) {
			return test.test(dependency, keyType, valueType);
		}
		
		public String key(ClassType keyType, ClassType valueType) {
			return key.key(dependency, keyType, valueType);
		}
		
	}
}
|
||||
@ -0,0 +1,53 @@
|
||||
package speiger.src.builder.modules;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.dependencies.IDependency;
|
||||
import speiger.src.builder.dependencies.ModuleDependency;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
/**
 * Builder module generating the async task/builder templates.
 * Single-typed (key only) and requires the base Java module.
 */
@SuppressWarnings("javadoc")
public class AsyncModule extends BaseModule
{
	/** Singleton instance registered into the dependency graph below. */
	public static final BaseModule INSTANCE = new AsyncModule();
	/** Module dependency; non-bi-typed, requires the base Java module on the key type. */
	public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, false).addKeyDependency(JavaModule.MODULE);
	
	@Override
	public String getModuleName() { return "Async"; }
	@Override
	protected void loadVariables() {}
	@Override
	protected void loadRemappers() {}
	@Override
	protected void loadTestClasses() {}
	@Override
	protected void loadFunctions() {}
	@Override
	public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) { return Arrays.asList(MODULE); }
	@Override
	protected void loadBlockades() {
		// When the module is disabled its template files must not be generated.
		if(!MODULE.isEnabled()) {
			addBlockedFiles("AsyncBuilder", "Task");
		}
	}
	@Override
	protected void loadFlags() {
		if(MODULE.isEnabled()) {
			addKeyFlag("ASYNC_MODULE");
		}
	}
	
	@Override
	protected void loadClasses()
	{
		//Implementation Classes
		addClassMapper("ASYNC_BUILDER", "AsyncBuilder");
		
		//Abstract Classes
		addAbstractMapper("BASE_TASK", "Base%sTask");
		
		//Interfaces
		addClassMapper("TASK", "Task");
	}
}
|
||||
198
src/builder/java/speiger/src/builder/modules/BaseModule.java
Normal file
198
src/builder/java/speiger/src/builder/modules/BaseModule.java
Normal file
@ -0,0 +1,198 @@
|
||||
package speiger.src.builder.modules;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.ModulePackage;
|
||||
import speiger.src.builder.RequiredType;
|
||||
import speiger.src.builder.SettingsManager;
|
||||
import speiger.src.builder.dependencies.IDependency;
|
||||
import speiger.src.builder.mappers.ArgumentMapper;
|
||||
import speiger.src.builder.mappers.InjectMapper;
|
||||
import speiger.src.builder.mappers.LineMapper;
|
||||
import speiger.src.builder.mappers.SimpleMapper;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
/**
 * Base class of every builder module. A module is initialized once per
 * key/value type pair (init), contributes template mappers/flags/blockades to
 * the current ModulePackage through the protected helpers, and is cleaned up
 * afterwards. Not thread-safe: init/cleanup mutate shared state.
 */
@SuppressWarnings("javadoc")
public abstract class BaseModule
{
	SettingsManager manager;
	// Per-run state, valid only between init() and cleanup().
	ModulePackage entry;
	protected ClassType keyType;
	protected ClassType valueType;
	
	/** Registers this module with the settings manager (which stores it back). */
	public final void setManager(SettingsManager manager) {
		this.manager = manager;
		manager.addModule(this);
	}
	
	/**
	 * Prepares this module for one key/value type pair: publishes the pair to
	 * all dependencies, then runs the load* template hooks in fixed order.
	 */
	public final void init(ModulePackage entry) {
		this.entry = entry;
		keyType = entry.getKeyType();
		valueType = entry.getValueType();
		for(IDependency dependency : getDependencies(keyType, valueType)) {
			dependency.set(keyType, valueType);
		}
		loadVariables();
		loadClasses();
		loadTestClasses();
		loadFunctions();
		loadRemappers();
		loadBlockades();
		loadFlags();
	}
	
	/** Drops all per-run state so the module can be reused. */
	public final void cleanup() {
		entry = null;
		keyType = null;
		valueType = null;
		manager = null;
	}
	
	// Template hooks, invoked by init() in the order listed there.
	protected abstract void loadVariables();
	protected abstract void loadClasses();
	protected abstract void loadTestClasses();
	protected abstract void loadFunctions();
	protected abstract void loadRemappers();
	protected abstract void loadBlockades();
	protected abstract void loadFlags();
	
	public abstract String getModuleName();
	/** Whether this module operates on key AND value types. Defaults to key-only. */
	public boolean isBiModule() { return false; }
	public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) { return Collections.emptyList(); }
	public boolean isModuleValid(ClassType keyType, ClassType valueType) { return true; }
	
	public ClassType keyType() { return keyType; }
	public ClassType valueType() { return valueType; }
	
	protected void addFlag(String name) {
		entry.addFlag(name);
	}
	
	protected void addValue(String name, int value) {
		entry.addValue(name, value);
	}
	
	/** Adds a package flag plus a global flag prefixed with the key type. */
	protected void addKeyFlag(String name) {
		entry.addFlag(name);
		entry.addGlobalFlag(keyType.getCapType()+"_"+name);
	}
	
	/** Marks a template as bi-class with the default "Key2Value" splitter. */
	protected void addBiRequirement(String fileName) {
		entry.addRequirement(fileName, RequiredType.BI_CLASS);
		entry.addSplitter(fileName, "%1$s2%2$s");
	}
	
	protected void addEnumRequirement(String fileName) {
		entry.addRequirement(fileName, RequiredType.ENUM);
		entry.addSplitter(fileName, "%2$s");
		
	}
	
	protected void addBiRequirement(String fileName, String splitter) {
		entry.addRequirement(fileName, RequiredType.BI_CLASS);
		entry.addSplitter(fileName, "%1$s"+splitter+"%2$s");
	}
	
	protected void addRequirement(String fileName, String splitter, RequiredType type) {
		entry.addRequirement(fileName, type);
		entry.addSplitter(fileName, splitter);
	}
	
	protected void addRemapper(String fileName, String actualName) {
		entry.addRemapper(fileName, actualName);
	}
	
	protected void addBlockedFiles(String... name) {
		entry.addBlockedFiles(name);
	}
	
	protected void addBlockedFilter(Predicate<String> filter) {
		entry.addBlockedFilter(filter);
	}
	
	/** Maps PATTERN -> KeyTypeReplacement and VALUE_PATTERN -> ValueTypeReplacement. */
	protected void addClassMapper(String pattern, String replacement) {
		entry.addMapper(new SimpleMapper("VALUE_"+pattern, "VALUE_"+pattern, valueType.getFileType()+replacement));
		entry.addMapper(new SimpleMapper(pattern, pattern, keyType.getFileType()+replacement));
	}
	
	/** Maps KEY_/VALUE_/plain variants of a bi-typed class name ("Key2Value..."). */
	protected void addBiClassMapper(String pattern, String replacement, String splitter) {
		entry.addMapper(new SimpleMapper("KEY_"+pattern, "KEY_"+pattern, keyType.getFileType()+splitter+keyType.getFileType()+replacement));
		entry.addMapper(new SimpleMapper("VALUE_"+pattern, "VALUE_"+pattern, valueType.getFileType()+splitter+valueType.getFileType()+replacement));
		entry.addMapper(new SimpleMapper(pattern, pattern, keyType.getFileType()+splitter+valueType.getFileType()+replacement));
	}
	
	/** Like addClassMapper but the replacement is a format string ("Abstract%sX"). */
	protected void addAbstractMapper(String pattern, String replacement) {
		entry.addMapper(new SimpleMapper("VALUE_"+pattern, "VALUE_"+pattern, String.format(replacement, valueType.getFileType())));
		entry.addMapper(new SimpleMapper(pattern, pattern, String.format(replacement, keyType.getFileType())));
	}
	
	protected void addAbstractBiMapper(String pattern, String replacement, String splitter) {
		entry.addMapper(new SimpleMapper(pattern, pattern, String.format(replacement, keyType.getFileType()+splitter+valueType.getFileType())));
	}
	
	/** Function-name mapper; uses the non-file type name as suffix. */
	protected void addFunctionMapper(String pattern, String replacement) {
		entry.addMapper(new SimpleMapper("VALUE_"+pattern, "VALUE_"+pattern, replacement+valueType.getNonFileType()));
		entry.addMapper(new SimpleMapper(pattern, pattern, replacement+keyType.getNonFileType()));
	}
	
	protected void addFunctionValueMapper(String pattern, String replacement) {
		entry.addMapper(new SimpleMapper(pattern, pattern, replacement+valueType.getNonFileType()));
	}
	
	/** Function-name mapper with a format-string replacement. */
	protected void addFunctionMappers(String pattern, String replacement) {
		entry.addMapper(new SimpleMapper("VALUE_"+pattern, "VALUE_"+pattern, String.format(replacement, valueType.getNonFileType())));
		entry.addMapper(new SimpleMapper(pattern, pattern, String.format(replacement, keyType.getNonFileType())));
	}
	
	protected void addFunctionValueMappers(String pattern, String replacement) {
		entry.addMapper(new SimpleMapper(pattern, pattern, String.format(replacement, valueType.getNonFileType())));
	}
	
	protected void addSimpleMapper(String pattern, String replacement) {
		entry.addMapper(new SimpleMapper(pattern, pattern, replacement));
	}
	
	/** Emits the annotation for primitive types; strips the line for Object keys. */
	protected void addAnnontion(String pattern, String value) {
		if(keyType == ClassType.OBJECT) entry.addMapper(new LineMapper(pattern, pattern));
		else entry.addMapper(new SimpleMapper(pattern, pattern, value));
	}
	
	protected void addValueAnnontion(String pattern, String value) {
		if(valueType == ClassType.OBJECT) entry.addMapper(new LineMapper(pattern, pattern));
		else entry.addMapper(new SimpleMapper(pattern, pattern, value));
	}
	
	/** Injects the comment for Object keys; strips the line for primitives. */
	protected void addComment(String pattern, String value) {
		if(keyType == ClassType.OBJECT) entry.addMapper(new InjectMapper(pattern, pattern, value).removeBraces());
		else entry.addMapper(new LineMapper(pattern, pattern));
	}
	
	protected void addValueComment(String pattern, String value) {
		if(valueType == ClassType.OBJECT) entry.addMapper(new InjectMapper(pattern, pattern, value).removeBraces());
		else entry.addMapper(new LineMapper(pattern, pattern));
	}
	
	/** Registers an InjectMapper and returns it for further configuration. */
	protected InjectMapper addInjectMapper(String pattern, String replacement) {
		InjectMapper mapper = new InjectMapper(pattern, pattern, replacement);
		entry.addMapper(mapper);
		return mapper;
	}
	
	protected ArgumentMapper addArgumentMapper(String pattern, String replacement) {
		return addArgumentMapper(pattern, replacement, ", ");
	}
	
	protected ArgumentMapper addArgumentMapper(String pattern, String replacement, String splitter) {
		ArgumentMapper mapper = new ArgumentMapper(pattern, pattern, replacement, splitter);
		entry.addMapper(mapper);
		return mapper;
	}
	
	/** Fluent "apply and return" helper. */
	public static <T> T make(T input, Consumer<T> processor) {
		processor.accept(input);
		return input;
	}
}
|
||||
@ -0,0 +1,135 @@
|
||||
package speiger.src.builder.modules;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.dependencies.FunctionDependency;
|
||||
import speiger.src.builder.dependencies.IDependency;
|
||||
import speiger.src.builder.dependencies.ModuleDependency;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
/**
 * Builder module for the core collection templates (iterables, iterators,
 * collections, stacks) with optional sub-features for streams, split
 * iterators, IArray and hash strategies.
 */
@SuppressWarnings("javadoc")
public class CollectionModule extends BaseModule
{
	public static final BaseModule INSTANCE = new CollectionModule();
	// Requires the function module for the key type, plus (optionally) its
	// Object/int variants when the key type differs from them.
	public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, false)
	.addKeyDependency(FunctionModule.MODULE)
	.addOptionalTypeDependency(FunctionModule.MODULE, ClassType.OBJECT, true)
	.addOptionalTypeDependency(FunctionModule.MODULE, ClassType.INT, true)
	.addOptionalTypeDependency(ClassType.OBJECT, true);
	// Optional sub-features, each toggleable in the settings.
	public static final FunctionDependency STREAMS = MODULE.createDependency("Streams");
	public static final FunctionDependency SPLIT_ITERATORS = MODULE.createDependency("Splititerators").addKeyDependency(STREAMS);
	public static final FunctionDependency IARRAY = MODULE.createDependency("IArray");
	public static final FunctionDependency STRATEGY = MODULE.createDependency("Strategy");
	
	@Override
	public String getModuleName() { return "Collection"; }
	@Override
	protected void loadVariables() {}
	@Override
	public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) { return Arrays.asList(MODULE, STREAMS, SPLIT_ITERATORS, IARRAY, STRATEGY); }
	
	@Override
	protected void loadFlags() {
		if(MODULE.isEnabled()) addKeyFlag("COLLECTION_MODULE");
		if(STREAMS.isEnabled()) addKeyFlag("STREAM_FEATURE");
		if(SPLIT_ITERATORS.isEnabled()) addKeyFlag("SPLIT_ITERATOR_FEATURE");
		if(IARRAY.isEnabled()) addKeyFlag("IARRAY_FEATURE");
	}
	
	@Override
	protected void loadBlockades() {
		// Block template files of every disabled (sub-)feature.
		if(!MODULE.isEnabled()) {
			addBlockedFiles("Iterable", "Iterables", "Iterator", "Iterators", "BidirectionalIterator", "ListIterator");
			addBlockedFiles("Arrays", "Collection", "AbstractCollection", "Collections", "Stack");
		}
		if(!SPLIT_ITERATORS.isEnabled()) addBlockedFiles("Splititerator", "Splititerators");
		if(!IARRAY.isEnabled()) addBlockedFiles("IArray");
		if(!STRATEGY.isEnabled()) addBlockedFiles("Strategy");
		
		// Object keys use the JDK types directly; booleans have no streams.
		if(keyType.isObject())
		{
			addBlockedFiles("Stack");
			addBlockedFiles("CollectionStreamTester");
		}
		if(keyType == ClassType.BOOLEAN)
		{
			addBlockedFiles("CollectionRemoveIfTester", "CollectionStreamTester");
			addBlockedFilter(T -> T.endsWith("Tester") && T.startsWith("Iterable"));
		}
	}
	
	@Override
	protected void loadRemappers()
	{
		//Main Classes
		addRemapper("IArray", "I%sArray");
		addRemapper("AbstractCollection", "Abstract%sCollection");
		
		//Test Classes
		addRemapper("AbstractIteratorTester", "Abstract%sIteratorTester");
		addRemapper("MinimalCollection", "Minimal%sCollection");
		addRemapper("TestCollectionGenerator", "Test%sCollectionGenerator");
		addRemapper("AbstractContainerTester", "Abstract%sContainerTester");
		addRemapper("AbstractCollectionTester", "Abstract%sCollectionTester");
		addRemapper("SimpleTestGenerator", "Simple%sTestGenerator");
	}
	
	@Override
	protected void loadFunctions()
	{
		addFunctionMapper("NEXT", "next");
		// Primitive-blocking types (e.g. boolean) have no JDK stream type.
		addSimpleMapper("NEW_STREAM", keyType.isPrimitiveBlocking() ? "" : keyType.getCustomJDKType().getKeyType()+"Stream");
		addFunctionMapper("PREVIOUS", "previous");
		addFunctionMapper("REMOVE_KEY", "rem");
		addSimpleMapper("TO_ARRAY", "to"+keyType.getNonFileType()+"Array");
		addSimpleMapper("VALUE_TO_ARRAY", "to"+valueType.getNonFileType()+"Array");
	}
	
	@Override
	protected void loadClasses()
	{
		//Abstract Classes
		addAbstractMapper("ABSTRACT_COLLECTION", "Abstract%sCollection");
		
		//Helper Classes
		addClassMapper("ARRAYS", "Arrays");
		addClassMapper("COLLECTIONS", "Collections");
		addClassMapper("ITERABLES", "Iterables");
		addClassMapper("SPLIT_ITERATORS", "Splititerators");
		addClassMapper("ITERATORS", "Iterators");
		
		//Interfaces
		addClassMapper("COLLECTION", "Collection");
		addClassMapper("ITERABLE", "Iterable");
		addClassMapper("SPLIT_ITERATOR", "Splititerator");
		addClassMapper("LIST_ITERATOR", "ListIterator");
		addClassMapper("BI_ITERATOR", "BidirectionalIterator");
		addClassMapper("ITERATOR", "Iterator");
		// Object keys fall back to the plain JDK Stack name.
		if(keyType.isObject()) addSimpleMapper("STACK", "Stack");
		else addClassMapper("STACK", "Stack");
		addClassMapper("STRATEGY", "Strategy");
	}
	
	@Override
	protected void loadTestClasses()
	{
		//Implementation Classes
		addAbstractMapper("MINIMAL_COLLECTION", "Minimal%sCollection");
		// NOTE(review): "BidirectionalteratorTester" looks like it is missing the
		// "I" of "Iterator" — confirm whether the template file actually carries
		// this spelling before changing it.
		addClassMapper("BIDIRECTIONAL_ITERATOR_TESTER", "BidirectionalteratorTester");
		addClassMapper("LIST_ITERATOR_TESTER", "ListIteratorTester");
		addClassMapper("ITERATOR_TESTER", "IteratorTester");
		addClassMapper("COLLECTION_TEST_BUILDER", "CollectionTestSuiteBuilder");
		addClassMapper("COLLECTION_CONSTRUCTOR_TESTS", "CollectionConstructorTests");
		
		//Abstract Classes
		addAbstractMapper("ABSTRACT_COLLECTION_TESTER", "Abstract%sCollectionTester");
		addAbstractMapper("ABSTRACT_CONTAINER_TESTER", "Abstract%sContainerTester");
		addAbstractMapper("ABSTRACT_ITERATOR_TESTER", "Abstract%sIteratorTester");
		
		//Helper Classes
		addAbstractMapper("TEST_COLLECTION_GENERATOR", "Test%sCollectionGenerator");
		addAbstractMapper("SIMPLE_TEST_GENERATOR", "Simple%sTestGenerator");
		
	}
}
|
||||
119
src/builder/java/speiger/src/builder/modules/FunctionModule.java
Normal file
119
src/builder/java/speiger/src/builder/modules/FunctionModule.java
Normal file
@ -0,0 +1,119 @@
|
||||
package speiger.src.builder.modules;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.RequiredType;
|
||||
import speiger.src.builder.dependencies.IDependency;
|
||||
import speiger.src.builder.dependencies.ModuleDependency;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
/**
 * Builder module for the functional interfaces (consumers, functions,
 * suppliers, predicates, comparators). Bi-typed: templates depend on both
 * key and value type, with several special cases to line up with the JDK's
 * naming scheme (Predicate, ToXFunction, UnaryOperator...).
 */
@SuppressWarnings("javadoc")
public class FunctionModule extends BaseModule
{
	public static final BaseModule INSTANCE = new FunctionModule();
	public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, false).addKeyDependency(JavaModule.MODULE);
	
	@Override
	public String getModuleName() { return "Function"; }
	@Override
	public boolean isBiModule() { return true; }
	@Override
	protected void loadVariables() {}
	@Override
	protected void loadFlags() {}
	@Override
	protected void loadTestClasses() {}
	
	@Override
	public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) {
		return Arrays.asList(MODULE);
	}
	
	@Override
	protected void loadBlockades()
	{
		// Object keys reuse the plain JDK interfaces directly.
		if(keyType.isObject()) addBlockedFiles("Consumer", "Comparator");
		if(!MODULE.isEnabled()) addBlockedFiles("Consumer", "BiConsumer", "Comparator", "Supplier", "Function", "UnaryOperator");
	}
	
	@Override
	protected void loadRemappers()
	{
		addBiRequirement("BiConsumer", "");
		addBiRequirement("UnaryOperator", "");
		// "Function" maps to different JDK-style names depending on the pair:
		// boolean result -> Predicate, Object->primitive -> ToXFunction,
		// same type -> UnaryOperator, primitive->Object -> XFunction,
		// otherwise the generic bi-typed Function template.
		if(valueType == ClassType.BOOLEAN) {
			addRequirement("Function", "%1$s", RequiredType.BI_CLASS);
			addRemapper("Function", (keyType.isObject() ? "" : "%s")+"Predicate");
		}
		else if(keyType.isObject() && !valueType.isObject()) {
			addRequirement("Function", "%2$s", RequiredType.BI_CLASS);
			addRemapper("Function", "To%sFunction");
		}
		else if(keyType == valueType) {
			addRequirement("Function", "%1$s", RequiredType.BI_CLASS);
			addRemapper("Function", (keyType.isObject() ? "" : "%s")+"UnaryOperator");
		}
		else if(valueType.isObject()) {
			addRequirement("Function", "%1$s", RequiredType.BI_CLASS);
			addRemapper("Function", "%sFunction");
		}
		else addBiRequirement("Function");
		addRemapper("BiConsumer", "%sConsumer");
	}
	
	@Override
	protected void loadFunctions()
	{
		addSimpleMapper("APPLY", keyType.getApply(valueType));
		// JDK suppliers use get() for Objects and getAsX() for primitives.
		addSimpleMapper("SUPPLY_GET", keyType.isObject() ? "get" : "getAs"+keyType.getNonFileType());
		addSimpleMapper("VALUE_SUPPLY_GET", valueType.isObject() ? "get" : "getAs"+valueType.getNonFileType());
	}
	
	@Override
	protected void loadClasses()
	{
		//Interfaces
		addBiClassMapper("BI_CONSUMER", "Consumer", "");
		addClassMapper("BI_TO_OBJECT_CONSUMER", "ObjectConsumer");
		addAbstractMapper("BI_FROM_OBJECT_CONSUMER", "Object%sConsumer");
		addAbstractMapper("BI_FROM_INT_CONSUMER", "Int%sConsumer");
		if(keyType.isObject()) {
			addSimpleMapper("TO_OBJECT_FUNCTION", keyType.getNonFileType()+"UnaryOperator");
			addSimpleMapper("VALUE_TO_OBJECT_FUNCTION", valueType.isObject() ? "UnaryOperator" : valueType.getFileType()+"Function");
		}
		else {
			addSimpleMapper("TO_OBJECT_FUNCTION", keyType.getNonFileType()+"Function");
			addSimpleMapper("VALUE_TO_OBJECT_FUNCTION", valueType.isObject() ? "UnaryOperator" : valueType.getFileType()+"Function");
		}
		// Same type-pair special cases as in loadRemappers().
		if(valueType == ClassType.BOOLEAN) addFunctionMappers("FUNCTION", "%sPredicate");
		else if(keyType.isObject() && !valueType.isObject()) addFunctionValueMappers("FUNCTION", "To%sFunction");
		else if(keyType == valueType) addFunctionMappers("FUNCTION", "%sUnaryOperator");
		else if(valueType.isObject()) addFunctionMappers("FUNCTION", "%sFunction");
		else addBiClassMapper("FUNCTION", "Function", "2");
		
		addFunctionMappers("PREDICATE", "%sPredicate");
		addClassMapper("SUPPLIER", "Supplier");
		addAbstractMapper("SINGLE_UNARY_OPERATOR", "%1$s%1$sUnaryOperator");
		addBiClassMapper("UNARY_OPERATOR", "UnaryOperator", "");
		if(keyType.isObject())
		{
			if(!valueType.isObject()) addSimpleMapper("VALUE_CONSUMER", valueType.getFileType()+"Consumer");
			else addSimpleMapper("VALUE_CONSUMER", "Consumer");
			addSimpleMapper("CONSUMER", "Consumer");
			addSimpleMapper("IARRAY", "IObjectArray");
		}
		else
		{
			if(valueType.isObject())
			{
				addSimpleMapper("VALUE_CONSUMER", "Consumer");
				addSimpleMapper("CONSUMER", keyType.getFileType()+"Consumer");
			}
			else addClassMapper("CONSUMER", "Consumer");
			addFunctionMappers("IARRAY", "I%sArray");
		}
		addSimpleMapper("VALUE_COMPARATOR", valueType.isObject() ? "Comparator" : String.format("%sComparator", valueType.getNonFileType()));
		addSimpleMapper("COMPARATOR", keyType.isObject() ? "Comparator" : String.format("%sComparator", keyType.getNonFileType()));
	}
}
|
||||
232
src/builder/java/speiger/src/builder/modules/JavaModule.java
Normal file
232
src/builder/java/speiger/src/builder/modules/JavaModule.java
Normal file
@ -0,0 +1,232 @@
|
||||
package speiger.src.builder.modules;

import java.util.Arrays;
import java.util.List;

import speiger.src.builder.ClassType;
import speiger.src.builder.dependencies.IDependency;
import speiger.src.builder.dependencies.ModuleDependency;

/**
 * Base module of the template processor. Registers the type variables shared by
 * every other module: key/value package paths, boxing/unboxing helpers,
 * generic-brace expansions and the names of the JDK functional interfaces that
 * the generated sources reference.
 */
@SuppressWarnings("javadoc")
public class JavaModule extends BaseModule
{
	public static final BaseModule INSTANCE = new JavaModule();
	public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, false);
	
	@Override
	public String getModuleName() { return "Base"; }
	@Override
	protected void loadVariables()
	{
		createHelperVars(keyType, false, "KEY");
		createHelperVars(valueType, true, "VALUE");
		loadBaseVariables();
	}
	
	@Override
	public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) {
		return Arrays.asList(MODULE);
	}
	
	@Override
	protected void loadFlags()
	{
		addFlag("TYPE_"+keyType.getCapType());
		addFlag("VALUE_"+valueType.getCapType());
		addValue("JAVA_VERSION", getVersion());
		if(keyType == valueType) addFlag("SAME_TYPE");
		if(keyType.hasFunction(valueType)) addFlag("JDK_FUNCTION");
		if(!keyType.needsCustomJDKType()) addFlag("JDK_TYPE");
		if(!keyType.isPrimitiveBlocking()) addFlag("PRIMITIVES");
		if(!valueType.isPrimitiveBlocking()) addFlag("VALUE_PRIMITIVES");
		if(!valueType.needsCustomJDKType()) addFlag("JDK_VALUE");
	}
	
	/**
	 * Parses the major Java version from the "java.version" system property.
	 * Supports legacy "1.8.0_xxx" style as well as modern "17.0.1" style.
	 * Unlike the previous revision this no longer throws on suffixed version
	 * strings such as "17-ea" or "21+35" — only the leading digit run is parsed.
	 */
	private int getVersion() {
		String version = System.getProperty("java.version");
		if(version.startsWith("1.")) return Integer.parseInt(version.substring(2, 3));
		int end = 0;
		while(end < version.length() && Character.isDigit(version.charAt(end))) end++;
		return Integer.parseInt(version.substring(0, end));
	}
	
	@Override
	protected void loadRemappers() {}
	@Override
	protected void loadBlockades() {}
	
	@Override
	protected void loadFunctions()
	{
		addSimpleMapper("APPLY_KEY_VALUE", keyType.isObject() ? "apply" : "applyAs"+keyType.getNonFileType());
		addSimpleMapper("APPLY_VALUE", valueType.isObject() ? "apply" : "applyAs"+valueType.getNonFileType());
		addSimpleMapper("APPLY_CAST", "applyAs"+keyType.getCustomJDKType().getNonFileType());
		
		//Shared by Maps and Pairs so moved to java.
		addFunctionMappers("ENTRY_KEY", "get%sKey");
		addFunctionValueMappers("ENTRY_VALUE", "get%sValue");
		addFunctionMappers("KEY_ENTRY", "set%sKey");
		addFunctionValueMappers("VALUE_ENTRY", "set%sValue");
	}
	
	@Override
	protected void loadClasses()
	{
		// RandomGenerator only exists since Java 17; older JDKs fall back to Random.
		if(getVersion() >= 17) addSimpleMapper("RANDOM", "RandomGenerator");
		else addSimpleMapper("RANDOM", "Random");
		// Primitive-blocking key types have no JDK functional counterpart; those map to "".
		addSimpleMapper("JAVA_PREDICATE", keyType.isPrimitiveBlocking() ? "" : keyType.getCustomJDKType().getFileType()+"Predicate");
		addSimpleMapper("JAVA_CONSUMER", keyType.isPrimitiveBlocking() ? "" : "java.util.function."+keyType.getCustomJDKType().getFileType()+"Consumer");
		addSimpleMapper("JAVA_SUPPLIER", keyType.isPrimitiveBlocking() ? "" : "java.util.function."+keyType.getCustomJDKType().getFileType()+"Supplier");
		addSimpleMapper("JAVA_FUNCTION", keyType.getFunctionClass(valueType));
		addSimpleMapper("JAVA_BINARY_OPERATOR", keyType == ClassType.BOOLEAN ? "" : (keyType.isObject() ? "java.util.function.BinaryOperator" : "java.util.function."+keyType.getCustomJDKType().getFileType()+"BinaryOperator"));
		// NOTE(review): the object branch maps JAVA_UNARY_OPERATOR to "BinaryOperator".
		// That looks like a copy-paste slip for "UnaryOperator" but the generated
		// templates may rely on it — confirm against the template sources before changing.
		addSimpleMapper("JAVA_UNARY_OPERATOR", keyType.isObject() ? "BinaryOperator" : keyType == ClassType.BOOLEAN ? "" : keyType.getCustomJDKType().getFileType()+"UnaryOperator");
		addSimpleMapper("JAVA_SPLIT_ITERATOR", keyType.isPrimitiveBlocking() ? "Spliterator" : "Of"+keyType.getCustomJDKType().getFileType());
		addSimpleMapper("JAVA_STREAM", keyType.isPrimitiveBlocking() ? "" : keyType.getCustomJDKType().getFileType()+"Stream");
		addSimpleMapper("JAVA_BUFFER", keyType.getFileType()+"Buffer");
	}
	
	@Override
	protected void loadTestClasses()
	{
		addClassMapper("HELPERS", "Helpers");
		addClassMapper("SAMPLE_ELEMENTS", "Samples");
	}
	
	/**
	 * Registers the shared key/value type variables. Tokens with a leading space
	 * (" KEY_GENERIC_TYPE" etc.) deliberately consume the preceding space in the
	 * template when they expand to "" for primitive types.
	 */
	private void loadBaseVariables()
	{
		addSimpleMapper("VALUE_PACKAGE", valueType.getPathType());
		addSimpleMapper("PACKAGE", keyType.getPathType());
		addSimpleMapper("CLASS_TYPE", keyType.getClassType());
		addSimpleMapper("CLASS_VALUE_TYPE", valueType.getClassValueType());
		addSimpleMapper("KEY_TYPE", keyType.getKeyType());
		addSimpleMapper("KEY_OBJECT_TYPE", keyType.isObject() ? "Object" : keyType.getKeyType());
		addSimpleMapper("KEY_STRING_TYPE", keyType.isObject() ? "String" : keyType.getKeyType());
		addSimpleMapper("KEY_SPECIAL_TYPE", keyType.isObject() ? "E" : keyType.getKeyType());
		addSimpleMapper("CLASS_OBJECT_TYPE", keyType.getClassType());
		addSimpleMapper("CLASS_OBJECT_VALUE_TYPE", valueType.getClassValueType());
		addSimpleMapper("CLASS_STRING_TYPE", keyType.isObject() ? "String" : keyType.getClassType());
		addSimpleMapper("CLASS_STRING_VALUE_TYPE", valueType.isObject() ? "String" : valueType.getClassValueType());
		addSimpleMapper("VALUE_TYPE", valueType.getValueType());
		addSimpleMapper("VALUE_OBJECT_TYPE", valueType.isObject() ? "Object" : valueType.getValueType());
		addSimpleMapper("VALUE_STRING_TYPE", valueType.isObject() ? "String" : valueType.getValueType());
		addSimpleMapper("VALUE_SPECIAL_TYPE", valueType.isObject() ? "E" : valueType.getKeyType());
		addSimpleMapper("KEY_JAVA_TYPE", keyType.getCustomJDKType().getKeyType());
		// FIX: previously derived from keyType.getCustomJDKType() — the VALUE mapper
		// must derive from valueType, mirroring KEY_JAVA_TYPE directly above.
		addSimpleMapper("VALUE_JAVA_TYPE", valueType.getCustomJDKType().getKeyType());
		
		addSimpleMapper("EMPTY_KEY_VALUE", keyType.getEmptyValue());
		addSimpleMapper("EMPTY_VALUE", valueType.getEmptyValue());
		
		addSimpleMapper("INVALID_KEY_VALUE", keyType.getInvalidValue());
		addSimpleMapper("INVALID_VALUE", valueType.getInvalidValue());
		
		addSimpleMapper(" KEY_STRING_GENERIC_TYPE", keyType.isObject() ? "<String>" : "");
		addSimpleMapper(" VALUE_STRING_GENERIC_TYPE", valueType.isObject() ? "<String>" : "");
		addSimpleMapper(" KEY_VALUE_STRING_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<String, String>" : "<String>") : (valueType.isObject() ? "<String>" : ""));
		
		addSimpleMapper(" KEY_SAME_GENERIC_TYPE", keyType.isObject() ? "<T, T>" : "");
		// FIX: previously guarded on keyType.isObject(); this emits the VALUE generic
		// "<V, V>" and therefore must be guarded on valueType, like every other VALUE mapper.
		addSimpleMapper(" VALUE_SAME_GENERIC_TYPE", valueType.isObject() ? "<V, V>" : "");
		
		addSimpleMapper(" KEY_GENERIC_TYPE", keyType.isObject() ? "<"+keyType.getKeyType()+">" : "");
		addSimpleMapper(" KEY_KEY_GENERIC_TYPE", keyType.isObject() ? "<"+keyType.getKeyType()+", "+keyType.getKeyType()+">" : "");
		addSimpleMapper(" KEY_CLASS_GENERIC_TYPE", keyType.isObject() ? "<"+keyType.getClassType()+">" : "");
		
		addSimpleMapper(" VALUE_GENERIC_TYPE", valueType.isObject() ? "<"+valueType.getValueType()+">" : "");
		addSimpleMapper(" VALUE_VALUE_GENERIC_TYPE", valueType.isObject() ? "<"+valueType.getValueType()+", "+valueType.getValueType()+">" : "");
		addSimpleMapper(" VALUE_CLASS_GENERIC_TYPE", valueType.isObject() ? "<"+valueType.getClassValueType()+">" : "");
		
		addSimpleMapper(" KEY_VALUE_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<"+keyType.getKeyType()+", "+valueType.getValueType()+">" : "<"+keyType.getKeyType()+">") : (valueType.isObject() ? "<"+valueType.getValueType()+">" : ""));
		addSimpleMapper(" KEY_VALUE_VALUE_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<"+keyType.getKeyType()+", "+valueType.getValueType()+", "+valueType.getValueType()+">" : "<"+keyType.getKeyType()+">") : (valueType.isObject() ? "<"+valueType.getValueType()+", "+valueType.getValueType()+">" : ""));
		addInjectMapper(" KEY_VALUE_SPECIAL_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<"+keyType.getKeyType()+", "+valueType.getValueType()+", %s>" : "<"+keyType.getKeyType()+", %s>") : (valueType.isObject() ? "<"+valueType.getValueType()+", %s>" : "<%s>")).setBraceType("<>").removeBraces();
		
		addSimpleMapper(" NO_GENERIC_TYPE", keyType.isObject() ? "<?>" : "");
		addSimpleMapper(" NO_KV_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<?, ?>" : "<?>") : valueType.isObject() ? "<?>" : "");
		addSimpleMapper(" KEY_COMPAREABLE_TYPE", keyType.isObject() ? "<"+keyType.getKeyType()+" extends Comparable<T>>" : "");
		
		addSimpleMapper(" KEY_SUPER_GENERIC_TYPE", keyType.isObject() ? "<? super "+keyType.getKeyType()+">" : "");
		addSimpleMapper(" VALUE_SUPER_GENERIC_TYPE", valueType.isObject() ? "<? super "+valueType.getValueType()+">" : "");
		addSimpleMapper(" KEY_VALUE_SUPER_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<? super "+keyType.getKeyType()+", ? super "+valueType.getValueType()+">" : "<? super "+keyType.getKeyType()+">") : (valueType.isObject() ? "<? super "+valueType.getValueType()+">" : ""));
		
		addSimpleMapper(" KEY_UNKNOWN_GENERIC_TYPE", keyType.isObject() ? "<? extends "+keyType.getKeyType()+">" : "");
		addSimpleMapper(" VALUE_UNKNOWN_GENERIC_TYPE", valueType.isObject() ? "<? extends "+valueType.getValueType()+">" : "");
		addSimpleMapper(" KEY_VALUE_UNKNOWN_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<? extends "+keyType.getKeyType()+", ? extends "+valueType.getValueType()+">" : "<? extends "+keyType.getKeyType()+">") : (valueType.isObject() ? "<? extends "+valueType.getValueType()+">" : ""));
		
		addSimpleMapper(" KEY_ENUM_VALUE_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<"+keyType.getKeyType()+" extends Enum<"+keyType.getKeyType()+">, "+valueType.getValueType()+">" : "<"+keyType.getKeyType()+" extends Enum<"+keyType.getKeyType()+">>") : (valueType.isObject() ? "<"+valueType.getValueType()+">" : ""));
		addSimpleMapper(" KEY_VALUE_ENUM_GENERIC_TYPE", keyType.isObject() ? (valueType.isObject() ? "<"+keyType.getKeyType()+", "+valueType.getValueType()+" extends Enum<"+valueType.getValueType()+">>" : "<"+keyType.getKeyType()+">") : (valueType.isObject() ? "<"+valueType.getValueType()+" extends Enum<"+valueType.getValueType()+">>" : ""));
		
		addInjectMapper(" KEY_SPECIAL_GENERIC_TYPE", keyType.isObject() ? "<%s>" : "").removeBraces().setBraceType("<>");
		addInjectMapper(" VALUE_SPECIAL_GENERIC_TYPE", valueType.isObject() ? "<%s>" : "").removeBraces().setBraceType("<>");
		addInjectMapper(" KSK_GENERIC_TYPE", keyType.isObject() ? "<%s, "+keyType.getKeyType()+">" : "<%s>").removeBraces().setBraceType("<>");
		addInjectMapper(" KKS_GENERIC_TYPE", keyType.isObject() ? "<"+keyType.getKeyType()+", %s>" : "<%s>").removeBraces().setBraceType("<>");
		addArgumentMapper(" KSS_GENERIC_TYPE", keyType.isObject() ? "<%1$s, %2$s>" : "<%2$s>").removeBraces().setBraceType("<>");
		addInjectMapper(" SK_GENERIC_TYPE", keyType.isObject() ? "<%s, "+keyType.getKeyType()+">" : "").removeBraces().setBraceType("<>");
		addInjectMapper(" KS_GENERIC_TYPE", keyType.isObject() ? "<"+keyType.getKeyType()+", %s>" : "").removeBraces().setBraceType("<>");
		addInjectMapper(" VSV_GENERIC_TYPE", valueType.isObject() ? "<%s, "+valueType.getValueType()+">" : "<%s>").removeBraces().setBraceType("<>");
		addInjectMapper(" VVS_GENERIC_TYPE", valueType.isObject() ? "<"+valueType.getValueType()+", %s>" : "<%s>").removeBraces().setBraceType("<>");
		addArgumentMapper(" VSS_GENERIC_TYPE", valueType.isObject() ? "<%1$s, %2$s>" : "<%2$s>").removeBraces().setBraceType("<>");
		addInjectMapper(" SV_GENERIC_TYPE", valueType.isObject() ? "<%s, "+valueType.getValueType()+">" : "").removeBraces().setBraceType("<>");
		addInjectMapper(" VS_GENERIC_TYPE", valueType.isObject() ? "<"+valueType.getValueType()+", %s>" : "").removeBraces().setBraceType("<>");
		
		addSimpleMapper(" GENERIC_KEY_BRACES", keyType.isObject() ? " <"+keyType.getKeyType()+">" : "");
		addSimpleMapper(" GENERIC_VALUE_BRACES", valueType.isObject() ? " <"+valueType.getValueType()+">" : "");
		addInjectMapper(" GENERIC_SPECIAL_KEY_BRACES", keyType.isObject() ? " <%s>" : "").removeBraces().setBraceType("<>");
		addInjectMapper(" GENERIC_SPECIAL_VALUE_BRACES", valueType.isObject() ? " <%s>" : "").removeBraces().setBraceType("<>");
		addSimpleMapper(" GENERIC_KEY_ENUM_VALUE_BRACES", keyType.isObject() ? (valueType.isObject() ? " <"+keyType.getKeyType()+" extends Enum<"+keyType.getKeyType()+">, "+valueType.getValueType()+">" : " <"+keyType.getKeyType()+" extends Enum<"+keyType.getKeyType()+">>") : (valueType.isObject() ? " <"+valueType.getValueType()+">" : ""));
		
		addInjectMapper(" GENERIC_KEY_SPECIAL_BRACES", keyType.isObject() ? " <"+keyType.getKeyType()+", %s>" : " <%s>").removeBraces().setBraceType("<>");
		// FIX: previously used valueType.getKeyType(); the VALUE mapper must emit the
		// value type parameter, consistent with GENERIC_VALUE_BRACES above.
		addInjectMapper(" GENERIC_VALUE_SPECIAL_BRACES", valueType.isObject() ? " <"+valueType.getValueType()+", %s>" : " <%s>").removeBraces().setBraceType("<>");
		
		addSimpleMapper(" GENERIC_KEY_VALUE_BRACES", keyType.isObject() ? (valueType.isObject() ? " <"+keyType.getKeyType()+", "+valueType.getValueType()+">" : " <"+keyType.getKeyType()+">") : (valueType.isObject() ? " <"+valueType.getValueType()+">" : ""));
		addSimpleMapper(" COMPAREABLE_KEY_BRACES", keyType.isObject() ? " <"+keyType.getKeyType()+" extends Comparable<T>>" : "");
		addSimpleMapper("KV_BRACES", keyType.isObject() || valueType.isObject() ? "<>" : "");
		addSimpleMapper("VALUE_BRACES", valueType.isObject() ? "<>" : "");
		addSimpleMapper("BRACES", keyType.isObject() ? "<>" : "");
		if(keyType.needsCustomJDKType())
		{
			addSimpleMapper("JAVA_TYPE", keyType.getCustomJDKType().getKeyType());
			addSimpleMapper("SANITY_CAST", "castTo"+keyType.getFileType());
		}
		addSimpleMapper("JAVA_CLASS", keyType.getCustomJDKType().getClassType());
		if(valueType.needsCustomJDKType())
		{
			addSimpleMapper("SANITY_CAST_VALUE", "castTo"+valueType.getFileType());
		}
		addSimpleMapper("[SPACE]", " ");
		addComment("@ArrayType", "@param <%s> the keyType of array that the operation should be applied");
		addComment("@Type", "@param <%s> the keyType of elements maintained by this Collection");
		addValueComment("@ValueArrayType", "@param <%s> the keyType of array that the operation should be applied");
		addValueComment("@ValueType", "@param <%s> the keyType of elements maintained by this Collection");
		addAnnontion("@PrimitiveOverride", "@Override");
		addSimpleMapper("@PrimitiveDoc", "");
		addAnnontion("@Primitive", "@Deprecated");
		addValueAnnontion("@ValuePrimitiveOverride", "@Override");
		addValueAnnontion("@ValuePrimitive", "@Deprecated");
	}
	
	/**
	 * Registers the boxing, equality, hashing and array-construction helpers for
	 * one side of the key/value pair.
	 *
	 * @param type the concrete type to generate helpers for
	 * @param value true when registering the VALUE side (selects the value class lookups)
	 * @param fix token prefix, "KEY" or "VALUE"
	 */
	private void createHelperVars(ClassType type, boolean value, String fix)
	{
		addArgumentMapper("EQUALS_"+fix+"_TYPE", "Objects.equals(%2$s, "+(type.isObject() ? "%1$s" : fix+"_TO_OBJ(%1$s)")+")").removeBraces();
		addInjectMapper(fix+"_EQUALS_NOT_NULL", type.getComparableValue()+" != "+(type.isPrimitiveBlocking() || type.needsCast() ? type.getEmptyValue() : "0")).removeBraces();
		addInjectMapper(fix+"_EQUALS_NULL", type.getComparableValue()+" == "+(type.isPrimitiveBlocking() || type.needsCast() ? type.getEmptyValue() : "0")).removeBraces();
		addArgumentMapper(fix+"_EQUALS_NOT", type.getEquals(true)).removeBraces();
		addArgumentMapper(fix+"_EQUALS", type.getEquals(false)).removeBraces();
		addSimpleMapper("FILE_"+fix+"_TYPE", type.getFileType());
		
		addArgumentMapper("COMPAREABLE_TO_"+fix, type.isObject() ? "((Comparable<"+type.getKeyType(value)+">)%1$s).compareTo(("+type.getKeyType(value)+")%2$s)" : type.getClassType(value)+".compare(%1$s, %2$s)").removeBraces();
		addArgumentMapper("COMPARE_TO_"+fix, type.isObject() ? "%1$s.compareTo(%2$s)" : type.getClassType(value)+".compare(%1$s, %2$s)").removeBraces();
		
		addInjectMapper(fix+"_TO_OBJ", type.isObject() ? "%s" : type.getClassType(value)+".valueOf(%s)").removeBraces();
		addInjectMapper("OBJ_TO_"+fix, type.isObject() ? "%s" : "%s."+type.getKeyType(value)+"Value()").removeBraces();
		addInjectMapper("CLASS_TO_"+fix, type.isObject() ? "("+type.getKeyType(value)+")%s" : "(("+type.getClassType(value)+")%s)."+type.getKeyType(value)+"Value()").removeBraces();
		
		addInjectMapper(fix+"_TO_HASH", type.isObject() ? "Objects.hashCode(%s)" : type.getClassType(value)+".hashCode(%s)").removeBraces();
		addInjectMapper(fix+"_TO_STRING", type.isObject() ? "Objects.toString(%s)" : type.getClassType(value)+".toString(%s)").removeBraces();
		
		// NOTE(review): the trailing space in "CAST_"+fix+"_ARRAY " appears deliberate
		// (the token swallows the following space when it expands to "") — do not trim.
		addSimpleMapper("CAST_"+fix+"_ARRAY ", type.isObject() ? "("+fix+"_TYPE[])" : "");
		addSimpleMapper("EMPTY_"+fix+"_ARRAY", type.isObject() ? "("+fix+"_TYPE[])ARRAYS.EMPTY_ARRAY" : "ARRAYS.EMPTY_ARRAY");
		addInjectMapper("NEW_"+fix+"_ARRAY", type.isObject() ? "("+fix+"_TYPE[])new Object[%s]" : "new "+fix+"_TYPE[%s]").removeBraces();
		addInjectMapper("NEW_SPECIAL_"+fix+"_ARRAY", type.isObject() ? "(E[])new Object[%s]" : "new "+fix+"_TYPE[%s]").removeBraces();
		if(value) addInjectMapper("NEW_CLASS_VALUE_ARRAY", type.isObject() ? "(CLASS_VALUE_TYPE[])new Object[%s]" : "new CLASS_VALUE_TYPE[%s]").removeBraces();
		else addInjectMapper("NEW_CLASS_ARRAY", type.isObject() ? "(CLASS_TYPE[])new Object[%s]" : "new CLASS_TYPE[%s]").removeBraces();
	}
}
|
||||
120
src/builder/java/speiger/src/builder/modules/ListModule.java
Normal file
120
src/builder/java/speiger/src/builder/modules/ListModule.java
Normal file
@ -0,0 +1,120 @@
|
||||
package speiger.src.builder.modules;

import java.util.Arrays;
import java.util.List;

import speiger.src.builder.ClassType;
import speiger.src.builder.dependencies.FunctionDependency;
import speiger.src.builder.dependencies.IDependency;
import speiger.src.builder.dependencies.ModuleDependency;

/**
 * Template-processor module for the List family. Declares the feature
 * dependencies (ArrayList, LinkedList, ImmutableList, CopyOnWriteList, the
 * Lists wrapper helper), the feature flags derived from them, the files that
 * get blocked when a feature is disabled, and the token mappers used when
 * generating the per-type List sources and their test classes.
 */
@SuppressWarnings("javadoc")
public class ListModule extends BaseModule
{
	public static final BaseModule INSTANCE = new ListModule();
	// Lists require the base Collection module plus its split-iterator support.
	public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, false).addKeyDependency(CollectionModule.MODULE).addKeyDependency(CollectionModule.SPLIT_ITERATORS);
	public static final FunctionDependency IMPLEMENTATION = MODULE.createDependency("Implementations");
	public static final FunctionDependency WRAPPERS = MODULE.createDependency("Wrappers");
	// Concrete implementations all require the shared Implementations feature.
	public static final FunctionDependency ARRAY_LIST = MODULE.createDependency("ArrayList").addKeyDependency(IMPLEMENTATION);
	public static final FunctionDependency LINKED_LIST = MODULE.createDependency("LinkedList").addKeyDependency(IMPLEMENTATION);
	public static final FunctionDependency IMMUTABLE_LIST = MODULE.createDependency("ImmutableList").addKeyDependency(IMPLEMENTATION);
	public static final FunctionDependency COPY_ON_WRITE_LIST = MODULE.createDependency("CopyOnWriteList").addKeyDependency(IMPLEMENTATION);
	
	@Override
	public String getModuleName() { return "List"; }
	@Override
	public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) { return Arrays.asList(MODULE, IMPLEMENTATION, WRAPPERS, ARRAY_LIST, LINKED_LIST, IMMUTABLE_LIST, COPY_ON_WRITE_LIST); }
	// No module-specific type variables; the Base module supplies them all.
	@Override
	protected void loadVariables() {}
	// One template flag per enabled feature so templates can #if on them.
	@Override
	protected void loadFlags() {
		if(MODULE.isEnabled()) addKeyFlag("LIST_MODULE");
		if(WRAPPERS.isEnabled()) addKeyFlag("LISTS_FEATURE");
		if(ARRAY_LIST.isEnabled()) addKeyFlag("ARRAY_LIST_FEATURE");
		if(LINKED_LIST.isEnabled()) addKeyFlag("LINKED_LIST_FEATURE");
		if(IMMUTABLE_LIST.isEnabled()) addKeyFlag("IMMUTABLE_LIST_FEATURE");
		if(COPY_ON_WRITE_LIST.isEnabled()) addKeyFlag("COPY_ON_WRITE_LIST_FEATURE");
	}
	
	// Suppresses generation of any template file whose feature is turned off.
	@Override
	protected void loadBlockades()
	{
		if(!WRAPPERS.isEnabled()) addBlockedFiles("Lists");
		if(!ARRAY_LIST.isEnabled()) addBlockedFiles("ArrayList");
		if(!LINKED_LIST.isEnabled()) addBlockedFiles("LinkedList");
		if(!IMMUTABLE_LIST.isEnabled()) addBlockedFiles("ImmutableList");
		if(!COPY_ON_WRITE_LIST.isEnabled()) addBlockedFiles("CopyOnWriteList");
		if(!MODULE.isEnabled()) addBlockedFiles("List", "AbstractList");
		
		// Type-specific test exclusions: object lists have no fill-buffer variant,
		// and boolean additionally lacks a replaceAll tester.
		if(keyType.isObject()) addBlockedFiles("ListFillBufferTester");
		if(keyType == ClassType.BOOLEAN) addBlockedFiles("ListFillBufferTester", "ListReplaceAllTester");
	}
	
	// Maps generic template file names to their per-type output names.
	@Override
	protected void loadRemappers()
	{
		//Main Classes
		addRemapper("AbstractList", "Abstract%sList");
		addRemapper("ImmutableList", "Immutable%sList");
		addRemapper("CopyOnWriteList", "CopyOnWrite%sArrayList");
		
		//Test Classes
		addRemapper("AbstractListTester", "Abstract%sListTester");
		addRemapper("AbstractListIndexOfTester", "Abstract%sListIndexOfTester");
		addRemapper("TestListGenerator", "Test%sListGenerator");
	}
	
	// Method-name tokens used inside the templates.
	@Override
	protected void loadFunctions()
	{
		addFunctionMapper("GET_KEY", "get");
		addFunctionMapper("GET_FIRST_KEY", "getFirst");
		addFunctionMapper("GET_LAST_KEY", "getLast");
		addFunctionMapper("REMOVE_FIRST_KEY", "removeFirst");
		addFunctionMapper("REMOVE_LAST_KEY", "removeLast");
		addFunctionMapper("REMOVE_SWAP", "swapRemove");
		// Object lists use "replaceObjects" to avoid clashing with List.replaceAll.
		addFunctionMappers("REPLACE", keyType.isObject() ? "replaceObjects" : "replace%ss");
		addFunctionMappers("SORT", "sort%ss");
	}
	
	// Class-name tokens for the generated main sources.
	@Override
	protected void loadClasses()
	{
		//Implementation Classes
		addClassMapper("ARRAY_LIST", "ArrayList");
		addAbstractMapper("COPY_ON_WRITE_LIST", "CopyOnWrite%sArrayList");
		addClassMapper("LINKED_LIST", "LinkedList");
		addAbstractMapper("IMMUTABLE_LIST", "Immutable%sList");
		
		//Abstract Classes
		addAbstractMapper("ABSTRACT_LIST", "Abstract%sList");
		
		//SubClasses
		addClassMapper("SUB_LIST", "SubList");
		addClassMapper("LIST_ITER", "ListIter");
		
		//Helper Classes
		addClassMapper("LISTS", "Lists");
		
		//Interfaces
		addClassMapper("LIST", "List");
	}
	
	// Class-name tokens for the generated test sources.
	@Override
	protected void loadTestClasses()
	{
		//Implementation Classes
		addClassMapper("LIST_TEST_BUILDER", "ListTestSuiteBuilder");
		addClassMapper("LIST_TESTS", "ListTests");
		
		//Abstract Classes
		addAbstractMapper("ABSTRACT_LIST_INDEX_OF_TESTER", "Abstract%sListIndexOfTester");
		addAbstractMapper("ABSTRACT_LIST_TESTER", "Abstract%sListTester");
		
		//Helper classes
		addAbstractMapper("TEST_LIST_GENERATOR", "Test%sListGenerator");
	}
}
|
||||
290
src/builder/java/speiger/src/builder/modules/MapModule.java
Normal file
290
src/builder/java/speiger/src/builder/modules/MapModule.java
Normal file
@ -0,0 +1,290 @@
|
||||
package speiger.src.builder.modules;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.dependencies.FunctionDependency;
|
||||
import speiger.src.builder.dependencies.IDependency;
|
||||
import speiger.src.builder.dependencies.ModuleDependency;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class MapModule extends BaseModule
|
||||
{
|
||||
public static final BaseModule INSTANCE = new MapModule();
|
||||
public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, true)
|
||||
.addKeyDependency(SetModule.MODULE)
|
||||
.addValueDependency(CollectionModule.MODULE)
|
||||
.addEntryDependency(SetModule.MODULE)
|
||||
.addTypeDependency(SetModule.MODULE, ClassType.OBJECT);
|
||||
public static final FunctionDependency IMPLEMENTATION = MODULE.createDependency("Implementations");
|
||||
public static final FunctionDependency WRAPPERS = MODULE.createDependency("Wrappers").addKeyDependency(SetModule.WRAPPERS).addOptionalTypeDependency(SetModule.WRAPPERS, ClassType.OBJECT, true);
|
||||
|
||||
public static final FunctionDependency ORDERED_MAP = MODULE.createDependency("OrderedMap").addKeyDependency(SetModule.ORDERED_SET).addOptionalTypeDependency(SetModule.ORDERED_SET, ClassType.OBJECT, true);
|
||||
public static final FunctionDependency SORTED_MAP = MODULE.createDependency("SortedMap").addKeyDependency(SetModule.SORTED_SET).addOptionalTypeDependency(SetModule.SORTED_SET, ClassType.OBJECT, true);
|
||||
|
||||
public static final FunctionDependency ARRAY_MAP = MODULE.createDependency("ArrayMap").addEntryDependency(ORDERED_MAP).addEntryDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency IMMUTABLE_MAP = MODULE.createDependency("ImmutableMap").addEntryDependency(IMPLEMENTATION);
|
||||
|
||||
public static final FunctionDependency HASH_MAP = MODULE.createDependency("HashMap").addEntryDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency LINKED_MAP = MODULE.createDependency("LinkedHashMap").addEntryDependency(HASH_MAP).addEntryDependency(ORDERED_MAP);
|
||||
|
||||
public static final FunctionDependency CUSTOM_MAP = MODULE.createDependency("CustomHashMap").addEntryDependency(IMPLEMENTATION).addKeyDependency(CollectionModule.STRATEGY);
|
||||
public static final FunctionDependency LINKED_CUSTOM_MAP = MODULE.createDependency("LinkedCustomHashMap").addEntryDependency(CUSTOM_MAP).addEntryDependency(ORDERED_MAP);
|
||||
|
||||
public static final FunctionDependency ENUM_MAP = MODULE.createDependency("EnumMap").addEntryDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency LINKED_ENUM_MAP = MODULE.createDependency("LinkedEnumMap").addEntryDependency(ENUM_MAP).addEntryDependency(ORDERED_MAP);
|
||||
|
||||
public static final FunctionDependency CONCURRENT_MAP = MODULE.createDependency("ConcurrentMap").addEntryDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency AVL_TREE_MAP = MODULE.createDependency("AVLTreeMap").addEntryDependency(SORTED_MAP).addEntryDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency RB_TREE_MAP = MODULE.createDependency("RBTreeMap").addEntryDependency(SORTED_MAP).addEntryDependency(IMPLEMENTATION);
|
||||
|
||||
@Override
|
||||
public String getModuleName() { return "Map"; }
|
||||
@Override
|
||||
public boolean isBiModule() { return true; }
|
||||
@Override
|
||||
protected void loadVariables() {}
|
||||
@Override
|
||||
public boolean isModuleValid(ClassType keyType, ClassType valueType) { return keyType != ClassType.BOOLEAN; }
|
||||
@Override
|
||||
public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) {
|
||||
List<IDependency> dependencies = new ArrayList<>(Arrays.asList(MODULE, ORDERED_MAP, SORTED_MAP, IMPLEMENTATION, WRAPPERS, ARRAY_MAP, IMMUTABLE_MAP, HASH_MAP, LINKED_MAP, CUSTOM_MAP, LINKED_CUSTOM_MAP, CONCURRENT_MAP, AVL_TREE_MAP, RB_TREE_MAP));
|
||||
if(keyType == ClassType.OBJECT) dependencies.addAll(Arrays.asList(ENUM_MAP, LINKED_ENUM_MAP));
|
||||
return dependencies;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadFlags()
|
||||
{
|
||||
if(MODULE.isEnabled()) addFlag("MAP_MODULE");
|
||||
if(WRAPPERS.isEnabled()) addFlag("MAPS_FEATURE");
|
||||
if(ORDERED_MAP.isEnabled()) addFlag("ORDERED_MAP_FEATURE");
|
||||
if(ARRAY_MAP.isEnabled()) addFlag("ARRAY_MAP_FEATURE");
|
||||
if(LINKED_MAP.isEnabled()) addFlag("LINKED_MAP_FEATURE");
|
||||
if(LINKED_CUSTOM_MAP.isEnabled()) addFlag("LINKED_CUSTOM_MAP_FEATURE");
|
||||
if(LINKED_ENUM_MAP.isEnabled()) addFlag("LINKED_ENUM_MAP_FEATURE");
|
||||
|
||||
if(SORTED_MAP.isEnabled()) addFlag("SORTED_MAP_FEATURE");
|
||||
if(AVL_TREE_MAP.isEnabled()) addFlag("AVL_TREE_MAP_FEATURE");
|
||||
if(RB_TREE_MAP.isEnabled()) addFlag("RB_TREE_MAP_FEATURE");
|
||||
|
||||
if(CONCURRENT_MAP.isEnabled()) addFlag("CONCURRENT_MAP_FEATURE");
|
||||
if(IMMUTABLE_MAP.isEnabled()) addFlag("IMMUTABLE_MAP_FEATURE");
|
||||
if(HASH_MAP.isEnabled()) addFlag("MAP_FEATURE");
|
||||
if(CUSTOM_MAP.isEnabled()) addFlag("CUSTOM_MAP_FEATURE");
|
||||
if(ENUM_MAP.isEnabled()) addFlag("ENUM_MAP_FEATURE");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadBlockades()
|
||||
{
|
||||
if(!MODULE.isEnabled()) addBlockedFiles("Map", "AbstractMap");
|
||||
if(!WRAPPERS.isEnabled()) addBlockedFiles("Maps");
|
||||
if(!IMMUTABLE_MAP.isEnabled()) addBlockedFiles("ImmutableOpenHashMap");
|
||||
if(!CONCURRENT_MAP.isEnabled()) addBlockedFiles("ConcurrentMap", "ConcurrentOpenHashMap");
|
||||
if(!ORDERED_MAP.isEnabled()) addBlockedFiles("OrderedMap");
|
||||
if(!HASH_MAP.isEnabled()) addBlockedFiles("OpenHashMap");
|
||||
if(!LINKED_MAP.isEnabled()) addBlockedFiles("LinkedOpenHashMap");
|
||||
if(!CUSTOM_MAP.isEnabled()) addBlockedFiles("OpenCustomHashMap");
|
||||
if(!LINKED_CUSTOM_MAP.isEnabled()) addBlockedFiles("LinkedOpenCustomHashMap");
|
||||
if(!ENUM_MAP.isEnabled()) addBlockedFiles("EnumMap");
|
||||
if(!LINKED_ENUM_MAP.isEnabled()) addBlockedFiles("LinkedEnumMap");
|
||||
if(!ARRAY_MAP.isEnabled()) addBlockedFiles("ArrayMap");
|
||||
if(!SORTED_MAP.isEnabled()) addBlockedFiles("SortedMap", "NavigableMap");
|
||||
if(!AVL_TREE_MAP.isEnabled()) addBlockedFiles("AVLTreeMap");
|
||||
if(!RB_TREE_MAP.isEnabled()) addBlockedFiles("RBTreeMap");
|
||||
|
||||
if(keyType == ClassType.BOOLEAN)
|
||||
{
|
||||
//Main Classes
|
||||
addBlockedFiles("SortedMap", "NavigableMap", "RBTreeMap", "AVLTreeMap");
|
||||
addBlockedFiles("OrderedMap", "ArrayMap", "LinkedOpenHashMap", "LinkedOpenCustomHashMap");
|
||||
addBlockedFiles("ConcurrentMap", "ConcurrentOpenHashMap");
|
||||
addBlockedFiles("Map", "Maps", "AbstractMap", "ImmutableOpenHashMap", "OpenHashMap", "OpenCustomHashMap");
|
||||
|
||||
//Test Classes
|
||||
addBlockedFiles("TestMap", "MapTests", "MapTestSuiteBuilder", "MapConstructorTests", "TestMapGenerator", "SimpleMapTestGenerator", "DerivedMapGenerators", "AbstractMapTester");
|
||||
addBlockedFiles("TestSortedMapGenerator", "OrderedMapTestSuiteBuilder", "NavigableMapTestSuiteBuilder", "SortedMapTestSuiteBuilder");
|
||||
addBlockedFiles("TestOrderedMapGenerator");
|
||||
addBlockedFilter(T -> T.endsWith("Tester") && (T.startsWith("Map") || T.startsWith("OrderedMap") || T.startsWith("SortedMap") || T.startsWith("NavigableMap")));
|
||||
}
|
||||
if(valueType == ClassType.OBJECT) {
|
||||
addBlockedFiles("MapComputeIfAbsentNonDefaultTester", "MapComputeIfPresentNonDefaultTester", "MapComputeNonDefaultTester", "MapSupplyIfAbsentNonDefaultTester");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
protected void loadRemappers()
{
	// Registers the template files the Map module produces and the output-name
	// patterns for templates whose generated class name differs from the template name.
	// NOTE(review): addBiRequirement/addEnumRequirement/addRemapper are declared in a
	// superclass not visible here; %s in remapper patterns is presumably replaced with
	// the key/value type names — confirm against BaseModule.
	
	//Main Classes
	addBiRequirement("Map");
	addBiRequirement("SortedMap");
	addBiRequirement("OrderedMap");
	addBiRequirement("NavigableMap");
	addBiRequirement("ConcurrentMap");
	addBiRequirement("AbstractMap");
	// Enum-keyed maps use the enum-specific requirement instead of the bi-typed one.
	addEnumRequirement("EnumMap");
	addEnumRequirement("LinkedEnumMap");
	addBiRequirement("ConcurrentOpenHashMap");
	addBiRequirement("ImmutableOpenHashMap");
	addBiRequirement("OpenHashMap");
	addBiRequirement("LinkedOpenHashMap");
	addBiRequirement("OpenCustomHashMap");
	addBiRequirement("LinkedOpenCustomHashMap");
	addBiRequirement("ArrayMap");
	addBiRequirement("RBTreeMap");
	addBiRequirement("AVLTreeMap");
	addBiRequirement("Maps");
	
	// Name patterns for main classes whose generated name is not simply the template name.
	addRemapper("AbstractMap", "Abstract%sMap");
	addRemapper("EnumMap", "Enum2%sMap");
	addRemapper("LinkedEnumMap", "LinkedEnum2%sMap");
	addRemapper("ImmutableOpenHashMap", "Immutable%sOpenHashMap");
	
	//Test Classes
	addBiRequirement("TestMapGenerator");
	addBiRequirement("TestSortedMapGenerator");
	addBiRequirement("TestOrderedMapGenerator");
	addBiRequirement("SimpleMapTestGenerator");
	addBiRequirement("DerivedMapGenerators");
	addBiRequirement("AbstractMapTester");
	addBiRequirement("MapTestSuiteBuilder");
	addBiRequirement("SortedMapTestSuiteBuilder");
	addBiRequirement("NavigableMapTestSuiteBuilder");
	addBiRequirement("OrderedMapTestSuiteBuilder");
	addBiRequirement("MapTests");
	addBiRequirement("MapConstructorTests");
	addBiRequirement("TestMap");
	addBiRequirement("MapAddToTester");
	addBiRequirement("MapSubFromTester");
	addBiRequirement("MapClearTester");
	addBiRequirement("MapComputeIfAbsentTester");
	addBiRequirement("MapComputeIfPresentTester");
	addBiRequirement("MapComputeTester");
	addBiRequirement("MapComputeIfAbsentNonDefaultTester");
	addBiRequirement("MapComputeIfPresentNonDefaultTester");
	addBiRequirement("MapComputeNonDefaultTester");
	addBiRequirement("MapCopyTester");
	addBiRequirement("MapContainsTester");
	addBiRequirement("MapContainsKeyTester");
	addBiRequirement("MapContainsValueTester");
	addBiRequirement("MapCreatorTester");
	addBiRequirement("MapEntrySetTester");
	addBiRequirement("MapEqualsTester");
	addBiRequirement("MapForEachTester");
	addBiRequirement("MapGetOrDefaultTester");
	addBiRequirement("MapGetTester");
	addBiRequirement("MapHashCodeTester");
	addBiRequirement("MapIsEmptyTester");
	addBiRequirement("MapMergeTester");
	addBiRequirement("MapMergeBulkTester");
	addBiRequirement("MapPutAllArrayTester");
	addBiRequirement("MapPutAllTester");
	addBiRequirement("MapPutIfAbsentTester");
	addBiRequirement("MapPutTester");
	addBiRequirement("MapRemoveEntryTester");
	addBiRequirement("MapRemoveOrDefaultTester");
	addBiRequirement("MapRemoveTester");
	addBiRequirement("MapReplaceAllTester");
	addBiRequirement("MapReplaceEntryTester");
	addBiRequirement("MapReplaceTester");
	addBiRequirement("MapSizeTester");
	addBiRequirement("MapSupplyIfAbsentTester");
	addBiRequirement("MapSupplyIfAbsentNonDefaultTester");
	addBiRequirement("MapToStringTester");
	addBiRequirement("NavigableMapNavigationTester");
	addBiRequirement("SortedMapNavigationTester");
	addBiRequirement("OrderedMapNavigationTester");
	addBiRequirement("OrderedMapMoveTester");
	addBiRequirement("MapConstructorTester");
	
	// Name patterns for test classes whose generated name is not simply the template name.
	addRemapper("TestMapGenerator", "Test%sMapGenerator");
	addRemapper("TestSortedMapGenerator", "Test%sSortedMapGenerator");
	addRemapper("TestOrderedMapGenerator", "Test%sOrderedMapGenerator");
	addRemapper("SimpleMapTestGenerator", "Simple%sMapTestGenerator");
	addRemapper("DerivedMapGenerators", "Derived%sMapGenerators");
	addRemapper("AbstractMapTester", "Abstract%sMapTester");
	addRemapper("TestMap", "Test%sMap");
}
|
||||
|
||||
@Override
protected void loadFunctions()
{
	// Maps template function placeholders (UPPER_SNAKE keys) to the concrete
	// method names used in the generated sources. "Value" variants substitute the
	// value type into %s, plain variants substitute the key type — NOTE(review):
	// inferred from naming; confirm against the addFunction*Mapper declarations.
	addFunctionValueMapper("BULK_MERGE", "mergeAll");
	addFunctionValueMappers("COMPUTE_IF_ABSENT", "compute%sIfAbsent");
	addFunctionValueMappers("COMPUTE_IF_PRESENT", "compute%sIfPresent");
	addFunctionValueMapper("COMPUTE", "compute");
	addFunctionMapper("DEQUEUE_LAST", "dequeueLast");
	addFunctionMapper("DEQUEUE", "dequeue");
	// Entry-set name is built directly from the two type names, e.g. "int2ObjectEntrySet".
	addSimpleMapper("ENTRY_SET", keyType.getFileType().toLowerCase()+"2"+valueType.getFileType()+"EntrySet");
	addFunctionMappers("FIRST_ENTRY_KEY", "first%sKey");
	addFunctionValueMappers("FIRST_ENTRY_VALUE", "first%sValue");
	// Object keys clash with the generic Map.get(Object), so the typed accessor is
	// renamed ("getObject"/"get") rather than mapped plainly.
	if(keyType.isObject()) addFunctionValueMapper("GET_VALUE", valueType.isObject() ? "getObject" : "get");
	else addSimpleMapper("GET_VALUE", "get");
	addFunctionMappers("LAST_ENTRY_KEY", "last%sKey");
	addFunctionValueMappers("LAST_ENTRY_VALUE", "last%sValue");
	addFunctionValueMapper("MERGE", "merge");
	addFunctionMappers("POLL_FIRST_ENTRY_KEY", "pollFirst%sKey");
	addFunctionMappers("POLL_LAST_ENTRY_KEY", "pollLast%sKey");
	// Same clash-avoidance for remove: object keys use the short "rem" name.
	if(keyType.isObject()) addFunctionMapper("REMOVE_VALUE", "rem");
	else addSimpleMapper("REMOVE_VALUE", "remove");
	addFunctionMapper("REMOVE", "remove");
	addFunctionValueMappers("REPLACE_VALUES", valueType.isObject() ? "replaceObjects" : "replace%ss");
	addFunctionValueMappers("SUPPLY_IF_ABSENT", "supply%sIfAbsent");
}
|
||||
|
||||
@Override
protected void loadClasses()
{
	// Maps template class placeholders to generated class names for the main
	// (non-test) sources. The trailing "2" is the key/value separator used in
	// bi-typed class names (e.g. Int2ObjectOpenHashMap).
	
	//Implementation Classes
	addAbstractBiMapper("IMMUTABLE_HASH_MAP", "Immutable%sOpenHashMap", "2");
	addBiClassMapper("LINKED_CUSTOM_HASH_MAP", "LinkedOpenCustomHashMap", "2");
	addBiClassMapper("LINKED_HASH_MAP", "LinkedOpenHashMap", "2");
	addBiClassMapper("CUSTOM_HASH_MAP", "OpenCustomHashMap", "2");
	addBiClassMapper("CONCURRENT_HASH_MAP", "ConcurrentOpenHashMap", "2");
	addBiClassMapper("AVL_TREE_MAP", "AVLTreeMap", "2");
	addBiClassMapper("RB_TREE_MAP", "RBTreeMap", "2");
	// Enum maps are keyed by an enum, so only the value type varies in the name.
	addFunctionValueMappers("LINKED_ENUM_MAP", valueType.isObject() ? "LinkedEnum2ObjectMap" : "LinkedEnum2%sMap");
	addFunctionValueMappers("ENUM_MAP", valueType.isObject() ? "Enum2ObjectMap" : "Enum2%sMap");
	addBiClassMapper("HASH_MAP", "OpenHashMap", "2");
	addBiClassMapper("ARRAY_MAP", "ArrayMap", "2");
	
	//Abstract Classes
	addAbstractBiMapper("ABSTRACT_MAP", "Abstract%sMap", "2");
	
	//Helper Classes
	addBiClassMapper("MAPS", "Maps", "2");
	
	//Interfaces
	addBiClassMapper("NAVIGABLE_MAP", "NavigableMap", "2");
	addBiClassMapper("ORDERED_MAP", "OrderedMap", "2");
	addBiClassMapper("SORTED_MAP", "SortedMap", "2");
	addBiClassMapper("CONCURRENT_MAP", "ConcurrentMap", "2");
	addBiClassMapper("MAP", "Map", "2");
}
|
||||
|
||||
@Override
protected void loadTestClasses()
{
	// Maps template class placeholders to generated class names for the test
	// sources; mirrors loadClasses() but for the test-support hierarchy.
	
	//Implementation Classes
	addAbstractBiMapper("SIMPLE_TEST_MAP", "Test%sMap", "2");
	addBiClassMapper("MAP_TESTS", "MapTests", "2");
	addAbstractBiMapper("NAVIGABLE_MAP_TEST_BUILDER", "%sNavigableMapTestSuiteBuilder", "2");
	addAbstractBiMapper("SORTED_MAP_TEST_BUILDER", "%sSortedMapTestSuiteBuilder", "2");
	addAbstractBiMapper("ORDERED_MAP_TEST_BUILDER", "%sOrderedMapTestSuiteBuilder", "2");
	addAbstractBiMapper("MAP_TEST_BUILDER", "%sMapTestSuiteBuilder", "2");
	
	//Abstract Classes
	addAbstractBiMapper("ABSTRACT_MAP_TESTER", "Abstract%sMapTester", "2");
	
	//Helper Classes
	addAbstractBiMapper("MAP_CONSTRUCTOR_TESTS", "%sMapConstructorTests", "2");
	addAbstractBiMapper("SIMPLE_MAP_TEST_GENERATOR", "Simple%sMapTestGenerator", "2");
	addAbstractBiMapper("DERIVED_MAP_GENERATORS", "Derived%sMapGenerators", "2");
	addAbstractBiMapper("TEST_ORDERED_MAP_GENERATOR", "Test%sOrderedMapGenerator", "2");
	addAbstractBiMapper("TEST_SORTED_MAP_GENERATOR", "Test%sSortedMapGenerator", "2");
	addAbstractBiMapper("TEST_MAP_GENERATOR", "Test%sMapGenerator", "2");
}
|
||||
}
|
||||
71
src/builder/java/speiger/src/builder/modules/PairModule.java
Normal file
71
src/builder/java/speiger/src/builder/modules/PairModule.java
Normal file
@ -0,0 +1,71 @@
|
||||
package speiger.src.builder.modules;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.dependencies.FunctionDependency;
|
||||
import speiger.src.builder.dependencies.IDependency;
|
||||
import speiger.src.builder.dependencies.ModuleDependency;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class PairModule extends BaseModule
|
||||
{
|
||||
public static final BaseModule INSTANCE = new PairModule();
|
||||
public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, true).addKeyDependency(JavaModule.MODULE);
|
||||
public static final FunctionDependency IMMUTABLE = MODULE.createDependency("Immutable");
|
||||
public static final FunctionDependency MUTABLE = MODULE.createDependency("Mutable");
|
||||
|
||||
|
||||
// public static final DependencyModule MODULE = new BiTypeModule(INSTANCE);
|
||||
// public static final DependencyFunction IMMUTABLE = MODULE.createFunction("Immutable");
|
||||
// public static final DependencyFunction MUTABLE = MODULE.createFunction("Mutable");
|
||||
|
||||
@Override
|
||||
public String getModuleName() { return "Pair"; }
|
||||
@Override
|
||||
public boolean isBiModule() { return true; }
|
||||
@Override
|
||||
protected void loadVariables() {}
|
||||
@Override
|
||||
protected void loadFunctions() {}
|
||||
@Override
|
||||
protected void loadTestClasses() {}
|
||||
@Override
|
||||
public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) { return Arrays.asList(MODULE, IMMUTABLE, MUTABLE); }
|
||||
|
||||
@Override
|
||||
protected void loadFlags() {
|
||||
if(MODULE.isEnabled()) addFlag("PAIR_MODULE");
|
||||
if(MUTABLE.isEnabled()) addFlag("MUTABLE_PAIR");
|
||||
if(IMMUTABLE.isEnabled()) addFlag("IMMUTABLE_PAIR");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadBlockades() {
|
||||
if(!MODULE.isEnabled()) addBlockedFiles("Pair");
|
||||
if(!MUTABLE.isEnabled()) addBlockedFiles("MutablePair");
|
||||
if(!IMMUTABLE.isEnabled()) addBlockedFiles("ImmutablePair");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadRemappers() {
|
||||
//Main Classes
|
||||
addBiRequirement("Pair", "");
|
||||
addBiRequirement("MutablePair", "");
|
||||
addBiRequirement("ImmutablePair", "");
|
||||
|
||||
//Test Classes
|
||||
addBiRequirement("PairTester", "");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadClasses() {
|
||||
//Implementations
|
||||
addBiClassMapper("IMMUTABLE_PAIR", "ImmutablePair", "");
|
||||
addBiClassMapper("MUTABLE_PAIR", "MutablePair", "");
|
||||
|
||||
//Interfaces
|
||||
addBiClassMapper("PAIR", "Pair", "");
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,102 @@
|
||||
package speiger.src.builder.modules;

import java.util.Arrays;
import java.util.List;

import speiger.src.builder.ClassType;
import speiger.src.builder.dependencies.FunctionDependency;
import speiger.src.builder.dependencies.IDependency;
import speiger.src.builder.dependencies.ModuleDependency;

/**
 * Builder module that wires the type-specific PriorityQueue templates
 * (FIFO, heap, and array-backed priority queues plus the dequeue interface)
 * into the code generator.
 */
@SuppressWarnings("javadoc")
public class PrioQueueModule extends BaseModule
{
	// Singleton instance used for dependency registration.
	public static final BaseModule INSTANCE = new PrioQueueModule();
	// Root dependency; queues build on top of the Collection module.
	public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, false).addKeyDependency(CollectionModule.MODULE);
	// Coarse feature toggles.
	public static final FunctionDependency IMPLEMENTATION = MODULE.createDependency("Implementations");
	public static final FunctionDependency WRAPPERS = MODULE.createDependency("Wrappers");
	public static final FunctionDependency DEQUEUE = MODULE.createDependency("Dequeue");
	
	// Concrete queue implementations; each requires the generic IMPLEMENTATION
	// feature, and the FIFO queue additionally requires the dequeue interface.
	public static final FunctionDependency FIFO_QUEUE = MODULE.createDependency("FiFoQueue").addKeyDependency(DEQUEUE).addKeyDependency(IMPLEMENTATION);
	public static final FunctionDependency HEAP_QUEUE = MODULE.createDependency("HeapQueue").addKeyDependency(IMPLEMENTATION);
	public static final FunctionDependency ARRAY_PRIO_QUEUE = MODULE.createDependency("ArrayPrioQueue").addKeyDependency(IMPLEMENTATION);
	
	@Override
	public String getModuleName() { return "PriorityQueue"; }
	@Override
	protected void loadVariables() {}
	@Override
	protected void loadFunctions() {}
	@Override
	public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) { return Arrays.asList(MODULE, WRAPPERS, IMPLEMENTATION, DEQUEUE, FIFO_QUEUE, HEAP_QUEUE, ARRAY_PRIO_QUEUE); }
	
	@Override
	protected void loadFlags() {
		// Expose one template flag per enabled feature.
		if(MODULE.isEnabled()) addFlag("QUEUE_MODULE");
		if(WRAPPERS.isEnabled()) addKeyFlag("QUEUES_FEATURE");
		if(DEQUEUE.isEnabled()) addKeyFlag("DEQUEUE_FEATURE");
		if(FIFO_QUEUE.isEnabled()) addKeyFlag("FIFO_QUEUE_FEATURE");
		if(HEAP_QUEUE.isEnabled()) addKeyFlag("HEAP_QUEUE_FEATURE");
		if(ARRAY_PRIO_QUEUE.isEnabled()) addKeyFlag("ARRAY_QUEUE_FEATURE");
	}
	
	@Override
	protected void loadBlockades() {
		// Suppress generation of any file whose owning feature is disabled.
		if(!MODULE.isEnabled()) addBlockedFiles("PriorityQueue", "AbstractPriorityQueue");
		if(!WRAPPERS.isEnabled()) addBlockedFiles("PriorityQueues");
		if(!DEQUEUE.isEnabled()) addBlockedFiles("PriorityDequeue");
		if(!FIFO_QUEUE.isEnabled()) addBlockedFiles("ArrayFIFOQueue");
		if(!HEAP_QUEUE.isEnabled()) addBlockedFiles("HeapPriorityQueue");
		if(!ARRAY_PRIO_QUEUE.isEnabled()) addBlockedFiles("ArrayPriorityQueue");
		
		// Boolean keys get no queue test classes.
		if(keyType == ClassType.BOOLEAN) {
			addBlockedFiles("QueueTests");
		}
	}
	
	@Override
	protected void loadRemappers() {
		// Output-name patterns for templates whose generated class name differs
		// from the template name; %s is the key-type placeholder.
		//Main Classes
		addRemapper("AbstractPriorityQueue", "Abstract%sPriorityQueue");
		
		//Test Classes
		addRemapper("TestQueueGenerator", "Test%sQueueGenerator");
		addRemapper("AbstractQueueTester", "Abstract%sQueueTester");
		addRemapper("SimpleQueueTestGenerator", "Simple%sQueueTestGenerator");
	}
	
	@Override
	protected void loadClasses() {
		// Maps template class placeholders to generated class names (main sources).
		//Implementation Classes
		addClassMapper("ARRAY_FIFO_QUEUE", "ArrayFIFOQueue");
		addClassMapper("ARRAY_PRIORITY_QUEUE", "ArrayPriorityQueue");
		addClassMapper("HEAP_PRIORITY_QUEUE", "HeapPriorityQueue");
		
		//Abstract Classes
		addAbstractMapper("ABSTRACT_PRIORITY_QUEUE", "Abstract%sPriorityQueue");
		
		//Helper Classes
		addClassMapper("PRIORITY_QUEUES", "PriorityQueues");
		
		//Interfaces
		addClassMapper("PRIORITY_QUEUE", "PriorityQueue");
		addClassMapper("PRIORITY_DEQUEUE", "PriorityDequeue");
	}
	
	@Override
	protected void loadTestClasses()
	{
		// Maps template class placeholders to generated class names (test sources).
		//Implementation Classes
		addClassMapper("DEQUEUE_TEST_BUILDER", "DequeueTestSuiteBuilder");
		addClassMapper("QUEUE_TEST_BUILDER", "QueueTestSuiteBuilder");
		addClassMapper("QUEUE_TESTS", "QueueTests");
		
		//Abstract Classes
		addAbstractMapper("ABSTRACT_QUEUE_TESTER", "Abstract%sQueueTester");
		
		//Helper Classes
		addAbstractMapper("SIMPLE_QUEUE_TEST_GENERATOR", "Simple%sQueueTestGenerator");
		addAbstractMapper("TEST_QUEUE_GENERATOR", "Test%sQueueGenerator");
	}
}
|
||||
161
src/builder/java/speiger/src/builder/modules/SetModule.java
Normal file
161
src/builder/java/speiger/src/builder/modules/SetModule.java
Normal file
@ -0,0 +1,161 @@
|
||||
package speiger.src.builder.modules;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import speiger.src.builder.ClassType;
|
||||
import speiger.src.builder.dependencies.FunctionDependency;
|
||||
import speiger.src.builder.dependencies.IDependency;
|
||||
import speiger.src.builder.dependencies.ModuleDependency;
|
||||
|
||||
@SuppressWarnings("javadoc")
|
||||
public class SetModule extends BaseModule
|
||||
{
|
||||
public static final BaseModule INSTANCE = new SetModule();
|
||||
public static final ModuleDependency MODULE = new ModuleDependency(INSTANCE, false).addKeyDependency(CollectionModule.MODULE).addKeyDependency(CollectionModule.SPLIT_ITERATORS);
|
||||
public static final FunctionDependency IMPLEMENTATION = MODULE.createDependency("Implementations");
|
||||
public static final FunctionDependency WRAPPERS = MODULE.createDependency("Wrappers");
|
||||
public static final FunctionDependency ORDERED_SET = MODULE.createDependency("OrderedSet");
|
||||
public static final FunctionDependency SORTED_SET = MODULE.createDependency("SortedSet");
|
||||
public static final FunctionDependency ARRAY_SET = MODULE.createDependency("ArraySet").addKeyDependency(ORDERED_SET).addKeyDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency IMMUTABLE_SET = MODULE.createDependency("ImmutableSet").addKeyDependency(ORDERED_SET).addKeyDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency HASH_SET = MODULE.createDependency("HashSet").addKeyDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency LINKED_SET = MODULE.createDependency("LinkedHashSet").addKeyDependency(ORDERED_SET).addKeyDependency(HASH_SET);
|
||||
public static final FunctionDependency CUSTOM_SET = MODULE.createDependency("CustomHashSet").addKeyDependency(IMPLEMENTATION).addKeyDependency(CollectionModule.STRATEGY);
|
||||
public static final FunctionDependency LINKED_CUSTOM_SET = MODULE.createDependency("LinkedCustomHashSet").addKeyDependency(ORDERED_SET).addKeyDependency(CUSTOM_SET);
|
||||
public static final FunctionDependency AVL_TREE_SET = MODULE.createDependency("AVLTreeSet").addKeyDependency(SORTED_SET).addKeyDependency(IMPLEMENTATION);
|
||||
public static final FunctionDependency RB_TREE_SET = MODULE.createDependency("RBTreeSet").addKeyDependency(SORTED_SET).addKeyDependency(IMPLEMENTATION);
|
||||
|
||||
@Override
|
||||
public String getModuleName() { return "Set"; }
|
||||
@Override
|
||||
protected void loadVariables() {}
|
||||
|
||||
@Override
|
||||
public boolean isModuleValid(ClassType keyType, ClassType valueType) { return keyType != ClassType.BOOLEAN; }
|
||||
@Override
|
||||
public List<IDependency> getDependencies(ClassType keyType, ClassType valueType) { return Arrays.asList(MODULE, WRAPPERS, ORDERED_SET, SORTED_SET, IMPLEMENTATION, ARRAY_SET, IMMUTABLE_SET, HASH_SET, LINKED_SET, CUSTOM_SET, LINKED_CUSTOM_SET, AVL_TREE_SET, RB_TREE_SET); }
|
||||
|
||||
@Override
|
||||
protected void loadFlags()
|
||||
{
|
||||
if(MODULE.isEnabled()) addFlag("SET_MODULE");
|
||||
if(WRAPPERS.isEnabled()) addFlag("SETS_FEATURE");
|
||||
if(ORDERED_SET.isEnabled()) addFlag("ORDERED_SET_FEATURE");
|
||||
if(SORTED_SET.isEnabled()) addFlag("SORTED_SET_FEATURE");
|
||||
if(IMMUTABLE_SET.isEnabled()) addFlag("IMMUTABLE_SET_FEATURE");
|
||||
if(ARRAY_SET.isEnabled()) addFlag("ARRAY_SET_FEATURE");
|
||||
if(HASH_SET.isEnabled()) addFlag("HASH_SET_FEATURE");
|
||||
if(LINKED_SET.isEnabled()) addFlag("LINKED_SET_FEATURE");
|
||||
if(CUSTOM_SET.isEnabled()) addFlag("CUSTOM_HASH_SET_FEATURE");
|
||||
if(LINKED_CUSTOM_SET.isEnabled()) addFlag("LINKED_CUSTOM_SET_FEATURE");
|
||||
if(AVL_TREE_SET.isEnabled()) addFlag("AVL_TREE_SET_FEATURE");
|
||||
if(RB_TREE_SET.isEnabled()) addFlag("RB_TREE_SET_FEATURE");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadBlockades()
|
||||
{
|
||||
if(!MODULE.isEnabled()) addBlockedFiles("Set", "AbstractSet");
|
||||
if(!WRAPPERS.isEnabled()) addBlockedFiles("Sets");
|
||||
if(!IMMUTABLE_SET.isEnabled()) addBlockedFiles("ImmutableOpenHashSet");
|
||||
if(!ORDERED_SET.isEnabled()) addBlockedFiles("OrderedSet");
|
||||
if(!HASH_SET.isEnabled()) addBlockedFiles("OpenHashSet");
|
||||
if(!LINKED_SET.isEnabled()) addBlockedFiles("LinkedOpenHashSet");
|
||||
if(!CUSTOM_SET.isEnabled()) addBlockedFiles("OpenCustomHashSet");
|
||||
if(!LINKED_CUSTOM_SET.isEnabled()) addBlockedFiles("LinkedOpenCustomHashSet");
|
||||
if(!ARRAY_SET.isEnabled()) addBlockedFiles("ArraySet");
|
||||
if(!SORTED_SET.isEnabled()) addBlockedFiles("SortedSet", "NavigableSet");
|
||||
if(!AVL_TREE_SET.isEnabled()) addBlockedFiles("AVLTreeSet");
|
||||
if(!RB_TREE_SET.isEnabled()) addBlockedFiles("RBTreeSet");
|
||||
|
||||
if(keyType == ClassType.BOOLEAN)
|
||||
{
|
||||
//Main Classes
|
||||
addBlockedFiles("SortedSet", "NavigableSet", "AVLTreeSet", "RBTreeSet");
|
||||
addBlockedFiles("OrderedSet", "ArraySet", "LinkedOpenHashSet", "LinkedOpenCustomHashSet");
|
||||
addBlockedFiles("Set", "Sets", "AbstractSet", "OpenHashSet", "OpenCustomHashSet", "ImmutableOpenHashSet");
|
||||
|
||||
//Test Classes
|
||||
addBlockedFiles("SetTests", "SetTestSuiteBuilder", "TestSetGenerator");
|
||||
addBlockedFiles("OrderedSetTestSuiteBuilder", "TestOrderedSetGenerator", "OrderedSetMoveTester", "OrderedSetNavigationTester", "OrderedSetIterationTester");
|
||||
addBlockedFiles("SortedSetTestSuiteBuilder", "TestSortedSetGenerator", "SortedSetNaviationTester", "SortedSetSubsetTestSetGenerator", "SortedSetIterationTester", "SortedSetNaviationTester");
|
||||
addBlockedFiles("NavigableSetTestSuiteBuilder", "TestNavigableSetGenerator", "NavigableSetNavigationTester");
|
||||
addBlockedFiles("MinimalSet", "AbstractSetTester", "SetAddAllTester", "SetAddTester", "SetCreationTester", "SetEqualsTester", "SetRemoveTester");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadRemappers()
|
||||
{
|
||||
//Main Classes
|
||||
addRemapper("AbstractSet", "Abstract%sSet");
|
||||
addRemapper("ImmutableOpenHashSet", "Immutable%sOpenHashSet");
|
||||
|
||||
//Test Classes
|
||||
addRemapper("MinimalSet", "Minimal%sSet");
|
||||
addRemapper("TestNavigableSetGenerator", "Test%sNavigableSetGenerator");
|
||||
addRemapper("TestSortedSetGenerator", "Test%sSortedSetGenerator");
|
||||
addRemapper("TestOrderedSetGenerator", "Test%sOrderedSetGenerator");
|
||||
addRemapper("TestSetGenerator", "Test%sSetGenerator");
|
||||
addRemapper("AbstractSetTester", "Abstract%sSetTester");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadFunctions()
|
||||
{
|
||||
addFunctionMapper("POLL_FIRST_KEY", "pollFirst");
|
||||
addFunctionMapper("POLL_LAST_KEY", "pollLast");
|
||||
addFunctionMapper("FIRST_KEY", "first");
|
||||
addFunctionMapper("LAST_KEY", "last");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadTestClasses()
|
||||
{
|
||||
//Implementation Classes
|
||||
addAbstractMapper("MINIMAL_SET", "Minimal%sSet");
|
||||
addClassMapper("ORDERED_SET_TEST_BUILDER", "OrderedSetTestSuiteBuilder");
|
||||
addClassMapper("SORTED_SET_TEST_BUILDER", "SortedSetTestSuiteBuilder");
|
||||
addClassMapper("NAVIGABLE_SET_TEST_BUILDER", "NavigableSetTestSuiteBuilder");
|
||||
addClassMapper("SET_TEST_BUILDER", "SetTestSuiteBuilder");
|
||||
addClassMapper("SET_TESTS", "SetTests");
|
||||
|
||||
//Abstract Classes
|
||||
addAbstractMapper("ABSTRACT_SET_TESTER", "Abstract%sSetTester");
|
||||
|
||||
//Helper Classes
|
||||
addClassMapper("SUB_SORTED_SET_CLASS_GENERATOR", "SortedSetSubsetTestSetGenerator");
|
||||
addClassMapper("SUB_NAVIGABLE_SET_CLASS_GENERATOR", "NavigableSetSubsetTestSetGenerator");
|
||||
addAbstractMapper("TEST_NAVIGABLE_SET_GENERATOR", "Test%sNavigableSetGenerator");
|
||||
addAbstractMapper("TEST_SORTED_SET_GENERATOR", "Test%sSortedSetGenerator");
|
||||
addAbstractMapper("TEST_ORDERED_SET_GENERATOR", "Test%sOrderedSetGenerator");
|
||||
addAbstractMapper("TEST_SET_GENERATOR", "Test%sSetGenerator");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadClasses()
|
||||
{
|
||||
//Implementation Classes
|
||||
addClassMapper("LINKED_CUSTOM_HASH_SET", "LinkedOpenCustomHashSet");
|
||||
addClassMapper("LINKED_HASH_SET", "LinkedOpenHashSet");
|
||||
addAbstractMapper("IMMUTABLE_HASH_SET", "Immutable%sOpenHashSet");
|
||||
addClassMapper("CUSTOM_HASH_SET", "OpenCustomHashSet");
|
||||
addClassMapper("HASH_SET", "OpenHashSet");
|
||||
addClassMapper("RB_TREE_SET", "RBTreeSet");
|
||||
addClassMapper("AVL_TREE_SET", "AVLTreeSet");
|
||||
addClassMapper("ARRAY_SET", "ArraySet");
|
||||
|
||||
//Abstract Classes
|
||||
addAbstractMapper("ABSTRACT_SET", "Abstract%sSet");
|
||||
|
||||
//Helper Classes
|
||||
addClassMapper("SETS", "Sets");
|
||||
|
||||
//Interfaces
|
||||
addClassMapper("NAVIGABLE_SET", "NavigableSet");
|
||||
addClassMapper("SORTED_SET", "SortedSet");
|
||||
addClassMapper("ORDERED_SET", "OrderedSet");
|
||||
addClassMapper("SET", "Set");
|
||||
}
|
||||
}
|
||||
@ -1,210 +1,266 @@
|
||||
package speiger.src.collections.PACKAGE.collections;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Objects;
|
||||
import java.util.AbstractCollection;
|
||||
|
||||
import speiger.src.collections.PACKAGE.collections.COLLECTION;
|
||||
#if !TYPE_OBJECT
|
||||
import speiger.src.collections.PACKAGE.utils.ITERATORS;
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Abstract Type Specific Collection that reduces boxing/unboxing
|
||||
* @Type(T)
|
||||
*/
|
||||
public abstract class ABSTRACT_COLLECTION KEY_GENERIC_TYPE extends AbstractCollection<CLASS_TYPE> implements COLLECTION KEY_GENERIC_TYPE
|
||||
{
|
||||
@Override
|
||||
public abstract ITERATOR KEY_GENERIC_TYPE iterator();
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/** {@inheritDoc}
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public boolean add(CLASS_TYPE e) { return COLLECTION.super.add(e); }
|
||||
|
||||
#endif
|
||||
@Override
|
||||
public boolean addAll(COLLECTION KEY_GENERIC_TYPE c) {
|
||||
boolean modified = false;
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = c.iterator();iter.hasNext();modified |= add(iter.NEXT()));
|
||||
return modified;
|
||||
}
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/** {@inheritDoc}
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public boolean contains(Object e) { return COLLECTION.super.contains(e); }
|
||||
|
||||
/**
|
||||
* A Type-Specific implementation of contains. This implementation iterates over the elements and returns true if the value match.
|
||||
* @param e the element that should be searched for.
|
||||
* @return true if the value was found.
|
||||
*/
|
||||
@Override
|
||||
public boolean contains(KEY_TYPE e) {
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) { if(KEY_EQUALS(iter.NEXT(), e)) return true; }
|
||||
return false;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
/** {@inheritDoc}
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public boolean addAll(Collection<? extends CLASS_TYPE> c)
|
||||
{
|
||||
return c instanceof COLLECTION ? addAll((COLLECTION KEY_GENERIC_TYPE)c) : super.addAll(c);
|
||||
}
|
||||
|
||||
/**
|
||||
* A Type-Specific implementation of containsAll. This implementation iterates over all elements and checks all elements are present in the other collection.
|
||||
* @param c the collection that should be checked if it contains all elements.
|
||||
* @return true if all elements were found in the collection
|
||||
* @throws java.lang.NullPointerException if the collection is null
|
||||
*/
|
||||
@Override
|
||||
public boolean containsAll(COLLECTION KEY_GENERIC_TYPE c) {
|
||||
Objects.requireNonNull(c);
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = c.iterator();iter.hasNext();)
|
||||
if(!contains(iter.NEXT()))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
 * This implementation iterates over the elements of the given collection and checks
 * if any of them is stored in this collection. Stops at the first match.
 * @param c the elements that should be checked for
 * @return true if any element is in this collection
 * @deprecated if this is a primitive collection
 * @throws java.lang.NullPointerException if the collection is null
 */
@Override
@Primitive
public boolean containsAny(Collection<?> c) {
	Objects.requireNonNull(c);
	// Boxed-object path: delegates to contains(Object), which unboxes per element.
	for(Object e : c)
		if(contains(e))
			return true;
	// Empty input collection also lands here and yields false.
	return false;
}
|
||||
|
||||
/**
|
||||
* This implementation iterates over the elements of the collection and checks if they are stored in this collection.
|
||||
* @param c the elements that should be checked for
|
||||
* @return true if any element is in this collection
|
||||
* @throws java.lang.NullPointerException if the collection is null
|
||||
*/
|
||||
@Override
|
||||
public boolean containsAny(COLLECTION KEY_GENERIC_TYPE c) {
|
||||
Objects.requireNonNull(c);
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = c.iterator();iter.hasNext();)
|
||||
if(contains(iter.NEXT()))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/** {@inheritDoc}
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public boolean remove(Object e) { return COLLECTION.super.remove(e); }
|
||||
|
||||
/**
 * A Type-Specific implementation of remove. This implementation iterates over the elements until it finds the element that is searched for or it runs out of elements.
 * It stops after finding the first element, so at most one occurrence is removed.
 * @param e the element that is searched for
 * @return true if the element was found and removed.
 */
@Override
public boolean REMOVE_KEY(KEY_TYPE e) {
	for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) {
		if(KEY_EQUALS(iter.NEXT(), e)) {
			// Iterator.remove keeps the traversal valid; no ConcurrentModificationException.
			iter.remove();
			return true;
		}
	}
	// Element not present (or collection empty).
	return false;
}
|
||||
|
||||
#endif
|
||||
/**
 * A Type-Specific implementation of removeAll. This Implementation iterates over all elements and removes them as they were found in the other collection.
 * @param c the elements that should be deleted
 * @return true if the collection was modified.
 * @throws java.lang.NullPointerException if the collection is null
 */
@Override
public boolean removeAll(COLLECTION KEY_GENERIC_TYPE c) {
	Objects.requireNonNull(c);
	boolean modified = false;
	// Iterate over THIS collection and query c, so removal happens through our
	// own iterator and stays safe against concurrent-modification issues.
	for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) {
		if(c.contains(iter.NEXT())) {
			iter.remove();
			modified = true;
		}
	}
	return modified;
}
|
||||
|
||||
/**
 * A Type-Specific implementation of retainAll. This Implementation iterates over all elements and removes them as they were not found in the other collection.
 * @param c the elements that should be kept
 * @return true if the collection was modified.
 * @throws java.lang.NullPointerException if the collection is null
 */
@Override
public boolean retainAll(COLLECTION KEY_GENERIC_TYPE c) {
	Objects.requireNonNull(c);
	// Fast path: retaining nothing is just a clear(); modified iff we had elements.
	if(c.isEmpty()) {
		boolean modified = !isEmpty();
		clear();
		return modified;
	}
	boolean modified = false;
	// Remove every element of this collection that c does not contain,
	// going through our own iterator so removal stays valid mid-traversal.
	for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) {
		if(!c.contains(iter.NEXT())) {
			iter.remove();
			modified = true;
		}
	}
	return modified;
}
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/**
|
||||
* A Type-Specific implementation of toArray that links to {@link #TO_ARRAY(KEY_TYPE[])} with a newly created array.
|
||||
* @return an array containing all of the elements in this collection
|
||||
*/
|
||||
@Override
|
||||
public KEY_TYPE[] TO_ARRAY() {
|
||||
return TO_ARRAY(new KEY_TYPE[size()]);
|
||||
}
|
||||
|
||||
/**
 * A Type-Specific implementation of toArray. This implementation iterates over all elements and unwraps them into primitive type.
 * @param a array that the elements should be injected to. If null or to small a new array with the right size is created
 * @return an array containing all of the elements in this collection
 */
@Override
public KEY_TYPE[] TO_ARRAY(KEY_TYPE[] a) {
	// Unlike Collection.toArray(T[]), a null or undersized array is replaced
	// rather than rejected; the caller must use the returned reference.
	if(a == null || a.length < size()) a = new KEY_TYPE[size()];
	ITERATORS.unwrap(a, iterator());
	return a;
}
|
||||
#endif
|
||||
package speiger.src.collections.PACKAGE.collections;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Objects;
|
||||
import java.util.AbstractCollection;
|
||||
#if TYPE_OBJECT
|
||||
import java.util.function.Consumer;
|
||||
#endif
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
import speiger.src.collections.PACKAGE.functions.CONSUMER;
|
||||
import speiger.src.collections.PACKAGE.utils.ITERATORS;
|
||||
import speiger.src.collections.PACKAGE.utils.ARRAYS;
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Abstract Type Specific Collection that reduces boxing/unboxing
|
||||
* @Type(T)
|
||||
*/
|
||||
public abstract class ABSTRACT_COLLECTION KEY_GENERIC_TYPE extends AbstractCollection<CLASS_TYPE> implements COLLECTION KEY_GENERIC_TYPE
|
||||
{
|
||||
@Override
|
||||
public abstract ITERATOR KEY_GENERIC_TYPE iterator();
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/** {@inheritDoc}
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public boolean add(CLASS_TYPE e) { return COLLECTION.super.add(e); }
|
||||
|
||||
#endif
|
||||
@Override
|
||||
public boolean addAll(COLLECTION KEY_GENERIC_TYPE c) {
|
||||
boolean modified = false;
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = c.iterator();iter.hasNext();modified |= add(iter.NEXT()));
|
||||
return modified;
|
||||
}
|
||||
|
||||
@Override
|
||||
public COLLECTION KEY_GENERIC_TYPE copy() { throw new UnsupportedOperationException(); }
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/** {@inheritDoc}
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public boolean contains(Object e) { return COLLECTION.super.contains(e); }
|
||||
|
||||
/**
|
||||
* A Type-Specific implementation of contains. This implementation iterates over the elements and returns true if the value match.
|
||||
* @param e the element that should be searched for.
|
||||
* @return true if the value was found.
|
||||
*/
|
||||
@Override
|
||||
public boolean contains(KEY_TYPE e) {
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) { if(KEY_EQUALS(iter.NEXT(), e)) return true; }
|
||||
return false;
|
||||
}
|
||||
|
||||
/** {@inheritDoc}
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public boolean addAll(Collection<? extends CLASS_TYPE> c)
|
||||
{
|
||||
return c instanceof COLLECTION ? addAll((COLLECTION KEY_GENERIC_TYPE)c) : super.addAll(c);
|
||||
}
|
||||
#endif
|
||||
|
||||
/**
|
||||
* A Type-Specific implementation of containsAll. This implementation iterates over all elements and checks all elements are present in the other collection.
|
||||
* @param c the collection that should be checked if it contains all elements.
|
||||
* @return true if all elements were found in the collection
|
||||
* @throws java.lang.NullPointerException if the collection is null
|
||||
*/
|
||||
@Override
|
||||
public boolean containsAll(COLLECTION KEY_GENERIC_TYPE c) {
|
||||
Objects.requireNonNull(c);
|
||||
if(c.isEmpty()) return true;
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = c.iterator();iter.hasNext();)
|
||||
if(!contains(iter.NEXT()))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsAll(Collection<?> c) {
|
||||
Objects.requireNonNull(c);
|
||||
return c instanceof COLLECTION ? containsAll((COLLECTION KEY_GENERIC_TYPE)c) : super.containsAll(c);
|
||||
}
|
||||
|
||||
/**
|
||||
* This implementation iterates over the elements of the collection and checks if they are stored in this collection
|
||||
* @param c the elements that should be checked for
|
||||
* @return true if any element is in this collection
|
||||
* @throws java.lang.NullPointerException if the collection is null
|
||||
*/
|
||||
@Override
|
||||
@Primitive
|
||||
public boolean containsAny(Collection<?> c) {
|
||||
Objects.requireNonNull(c);
|
||||
if(c.isEmpty()) return false;
|
||||
for(Object e : c)
|
||||
if(contains(e))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* This implementation iterates over the elements of the collection and checks if they are stored in this collection.
|
||||
* @param c the elements that should be checked for
|
||||
* @return true if any element is in this collection
|
||||
* @throws java.lang.NullPointerException if the collection is null
|
||||
*/
|
||||
@Override
|
||||
public boolean containsAny(COLLECTION KEY_GENERIC_TYPE c) {
|
||||
Objects.requireNonNull(c);
|
||||
if(c.isEmpty()) return false;
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = c.iterator();iter.hasNext();)
|
||||
if(contains(iter.NEXT()))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/** {@inheritDoc}
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public boolean remove(Object e) { return COLLECTION.super.remove(e); }
|
||||
|
||||
/**
|
||||
* A Type-Specific implementation of remove. This implementation iterates over the elements until it finds the element that is searched for or it runs out of elements.
|
||||
* It stops after finding the first element
|
||||
* @param e the element that is searched for
|
||||
* @return true if the element was found and removed.
|
||||
*/
|
||||
@Override
|
||||
public boolean REMOVE_KEY(KEY_TYPE e) {
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) {
|
||||
if(KEY_EQUALS(iter.NEXT(), e)) {
|
||||
iter.remove();
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
#endif
|
||||
/**
|
||||
* A Type-Specific implementation of removeAll. This Implementation iterates over all elements and removes them as they were found in the other collection.
|
||||
* @param c the elements that should be deleted
|
||||
* @return true if the collection was modified.
|
||||
* @throws java.lang.NullPointerException if the collection is null
|
||||
*/
|
||||
@Override
|
||||
public boolean removeAll(COLLECTION KEY_GENERIC_TYPE c) {
|
||||
Objects.requireNonNull(c);
|
||||
if(c.isEmpty()) return false;
|
||||
boolean modified = false;
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) {
|
||||
if(c.contains(iter.NEXT())) {
|
||||
iter.remove();
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
return modified;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean removeAll(COLLECTION KEY_GENERIC_TYPE c, CONSUMER KEY_GENERIC_TYPE r) {
|
||||
Objects.requireNonNull(c);
|
||||
if(c.isEmpty()) return false;
|
||||
Objects.requireNonNull(r);
|
||||
boolean modified = false;
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) {
|
||||
KEY_TYPE e = iter.NEXT();
|
||||
if(c.contains(e)) {
|
||||
r.accept(e);
|
||||
iter.remove();
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
return modified;
|
||||
}
|
||||
|
||||
/**
|
||||
* A Type-Specific implementation of retainAll. This Implementation iterates over all elements and removes them as they were not found in the other collection.
|
||||
* @param c the elements that should be kept
|
||||
* @return true if the collection was modified.
|
||||
* @throws java.lang.NullPointerException if the collection is null
|
||||
*/
|
||||
@Override
|
||||
public boolean retainAll(COLLECTION KEY_GENERIC_TYPE c) {
|
||||
Objects.requireNonNull(c);
|
||||
if(c.isEmpty()) {
|
||||
boolean modified = !isEmpty();
|
||||
clear();
|
||||
return modified;
|
||||
}
|
||||
boolean modified = false;
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) {
|
||||
if(!c.contains(iter.NEXT())) {
|
||||
iter.remove();
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
return modified;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean retainAll(COLLECTION KEY_GENERIC_TYPE c, CONSUMER KEY_GENERIC_TYPE r) {
|
||||
Objects.requireNonNull(c);
|
||||
Objects.requireNonNull(r);
|
||||
if(c.isEmpty()) {
|
||||
boolean modified = !isEmpty();
|
||||
forEach(r);
|
||||
clear();
|
||||
return modified;
|
||||
}
|
||||
boolean modified = false;
|
||||
for(ITERATOR KEY_GENERIC_TYPE iter = iterator();iter.hasNext();) {
|
||||
KEY_TYPE e = iter.NEXT();
|
||||
if(!c.contains(e)) {
|
||||
r.accept(e);
|
||||
iter.remove();
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
return modified;
|
||||
}
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/**
|
||||
* A Type-Specific implementation of toArray that links to {@link #TO_ARRAY(KEY_TYPE[])} with a newly created array.
|
||||
* @return an array containing all of the elements in this collection
|
||||
*/
|
||||
@Override
|
||||
public KEY_TYPE[] TO_ARRAY() {
|
||||
if(isEmpty()) return ARRAYS.EMPTY_ARRAY;
|
||||
return TO_ARRAY(new KEY_TYPE[size()]);
|
||||
}
|
||||
|
||||
/**
|
||||
* A Type-Specific implementation of toArray. This implementation iterates over all elements and unwraps them into primitive type.
|
||||
* @param a array that the elements should be injected to. If null or to small a new array with the right size is created
|
||||
* @return an array containing all of the elements in this collection
|
||||
*/
|
||||
@Override
|
||||
public KEY_TYPE[] TO_ARRAY(KEY_TYPE[] a) {
|
||||
if(a == null || a.length < size()) a = new KEY_TYPE[size()];
|
||||
ITERATORS.unwrap(a, iterator());
|
||||
if (a.length > size()) a[size()] = EMPTY_KEY_VALUE;
|
||||
return a;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
@ -1,19 +1,11 @@
|
||||
package speiger.src.collections.PACKAGE.collections;
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
import speiger.src.collections.objects.collections.ObjectBidirectionalIterator;
|
||||
/**
|
||||
* A Type-Specific {@link ObjectBidirectionalIterator} to reduce (un)boxing
|
||||
*/
|
||||
public interface BI_ITERATOR KEY_GENERIC_TYPE extends ITERATOR KEY_GENERIC_TYPE, ObjectBidirectionalIterator<CLASS_TYPE>
|
||||
#else
|
||||
/**
|
||||
* This is a basically a {@link java.util.ListIterator} without the index functions.
|
||||
* Allowing to have a simple Bidirectional Iterator without having to keep track of the Iteration index.
|
||||
* @Type(T)
|
||||
*/
|
||||
public interface BI_ITERATOR KEY_GENERIC_TYPE extends ITERATOR KEY_GENERIC_TYPE
|
||||
#endif
|
||||
{
|
||||
/**
|
||||
* Returns true if the Iterator has a Previous element
|
||||
@ -29,11 +21,11 @@ public interface BI_ITERATOR KEY_GENERIC_TYPE extends ITERATOR KEY_GENERIC_TYPE
|
||||
public KEY_TYPE PREVIOUS();
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/** {@inheritDoc}
|
||||
/**
|
||||
* <p>This default implementation delegates to the corresponding type-specific function.
|
||||
* @deprecated Please use the corresponding type-specific function instead.
|
||||
* @return the Previous element of the iterator.+
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public default CLASS_TYPE previous() {
|
||||
return KEY_TO_OBJ(PREVIOUS());
|
||||
@ -57,7 +49,7 @@ public interface BI_ITERATOR KEY_GENERIC_TYPE extends ITERATOR KEY_GENERIC_TYPE
|
||||
public default int back(int amount) {
|
||||
if(amount < 0) throw new IllegalStateException("Can't go forward");
|
||||
int i = 0;
|
||||
for(;i<amount && hasPrevious();previous(),i++);
|
||||
for(;i<amount && hasPrevious();PREVIOUS(),i++);
|
||||
return i;
|
||||
}
|
||||
}
|
||||
@ -1,198 +1,323 @@
|
||||
package speiger.src.collections.PACKAGE.collections;
|
||||
|
||||
import java.util.Collection;
|
||||
#if PRIMITIVES
|
||||
import java.util.Objects;
|
||||
import java.util.function.JAVA_PREDICATE;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.JAVA_STREAM;
|
||||
import java.util.stream.StreamSupport;
|
||||
#endif
|
||||
import speiger.src.collections.PACKAGE.utils.SPLIT_ITERATORS;
|
||||
|
||||
#if TYPE_BYTE || TYPE_SHORT || TYPE_CHAR || TYPE_FLOAT
|
||||
import speiger.src.collections.utils.SanityChecks;
|
||||
|
||||
#endif
|
||||
/**
|
||||
* A Type-Specific {@link Collection} that reduces (un)boxing
|
||||
* @Type(T)
|
||||
*/
|
||||
public interface COLLECTION KEY_GENERIC_TYPE extends Collection<CLASS_TYPE>, ITERABLE KEY_GENERIC_TYPE
|
||||
{
|
||||
#if !TYPE_OBJECT
|
||||
/**
|
||||
* A Type-Specific add function to reduce (un)boxing
|
||||
* @param o the element that should be added
|
||||
* @return true if the element was added to the collection
|
||||
*/
|
||||
public boolean add(KEY_TYPE o);
|
||||
|
||||
#endif
|
||||
/**
|
||||
* A Type-Specific addAll function to reduce (un)boxing
|
||||
* @param c the collection of elements that should be added
|
||||
* @return true if elements were added into the collection
|
||||
*/
|
||||
public boolean addAll(COLLECTION KEY_GENERIC_TYPE c);
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/**
|
||||
* A Type-Specific contains function to reduce (un)boxing
|
||||
* @param o the element that is checked for
|
||||
* @return true if the element is found in the collection
|
||||
*/
|
||||
public boolean contains(KEY_TYPE o);
|
||||
|
||||
#endif
|
||||
/**
|
||||
* A Type-Specific containsAll function to reduce (un)boxing
|
||||
* @param c the collection of elements that should be tested for
|
||||
* @return true if all the element is found in the collection
|
||||
*/
|
||||
public boolean containsAll(COLLECTION KEY_GENERIC_TYPE c);
|
||||
|
||||
/**
|
||||
* A Type-Specific containsAny function to reduce (un)boxing
|
||||
* @param c the collection of elements that should be tested for
|
||||
* @return true if any element was found
|
||||
*/
|
||||
public boolean containsAny(COLLECTION KEY_GENERIC_TYPE c);
|
||||
|
||||
/**
|
||||
* Returns true if any element of the Collection is found in the provided collection.
|
||||
* A Small Optimization function to find out of any element is present when comparing collections and not all of them.
|
||||
* @param c the collection of elements that should be tested for
|
||||
* @return true if any element was found.
|
||||
*/
|
||||
@Primitive
|
||||
public boolean containsAny(Collection<?> c);
|
||||
|
||||
#if !TYPE_OBJECT
|
||||
/**
|
||||
* A Type-Specific remove function that reduces (un)boxing.
|
||||
* @param o the element that should be removed
|
||||
* @return true if the element was removed
|
||||
* @see Collection#remove(Object)
|
||||