Compare commits


64 Commits

Author SHA1 Message Date
0b46b891c2 add simple quote message context menu 2023-09-01 21:13:36 +02:00
d131b5a02f add hacky way to load encrypted tox saves 2023-08-28 15:33:36 +02:00
9d9a486537 limit fps to 60 and disable imgui demo window 2023-08-28 14:06:27 +02:00
610ed10011 scope plugin paths 2023-08-20 12:50:58 +02:00
4edab11616 update deps and dont try to load files >50mib as images 2023-08-19 21:42:47 +02:00
6df0417667 update sdl Merge commit '4d48f9d23713d94b861da7b5d41baf2a41334994' 2023-08-12 20:17:29 +02:00
4d48f9d237 Squashed 'external/sdl/SDL/' changes from b8d91252c6..ec0042081e
ec0042081e Add .gitattributes file
a5d9db0cd0 cmake: build tests for UWP
b7889a7389 winrt: use windowsio in non-libc mode
ea8757a748 Make testaudiostreamdynamicresample compatible with emscripten
1a7a74fb2e cmake: build emscripten tests as html page
64d570f027 Add minimal http server for emscripten test apps
8e898c4a21 SDL_test does not parse --samples argument
91cd5478be audio: Fix resampler overflowing input buffer.
f290c85b22 testaudiocapture: Make sure we convert captured audio to output format.
b75c751dfc rwlock: Make generic implmentations work on single-threaded platforms.
80850af7ce The controller update complete events are no longer disabled by default
3f486224a9 Fixed refresh rate calculation for KMSDRM
342ec51131 Fix overflow when doing SDL_sscanf("%hd", ...)
9129e1d557 Fixed crash when setting the default cursor twice
8e99a4f4f5 Undo variable rename
be67f0de10 Fixed crashes related to the default cursor on WinRT and KMSDRM
94b3f78c44 Fix out of bound read of 'has_hat' array
94f48f19b0 Use more specific build destinations when creating an xcframework
dabd45997e Back out change supporting multiple names for binding elements
efe15588d5 Relabel back paddles as left or right
be884f0c95 ci: disable visionos.yml by renaming the file
ac094d00f5 ci: add workflow_dispatch event to visionos workflow
9be9e2292b build: Consistently use pathlib APIs in cmake/xxd.py
a9f6950657 Fixed deadlock shutting down Android sensors
d9f09e77f2 Actually make the sensors magical!
690eae7d22 Implement visionOS support
e385d6da0a Fixed build warning
6b93e788fa Improved sensor thread-safety
4ee0e5a984 Fixed thread-safety warnings
12deed91f8 Added information on how to enable thread-safety analysis
5735d2b03b coreaudio: Fixed assertion when device fails/quits mid-iteration.
1022fd6e04 testaudio: the test framework opens an audio device at startup; close it.
0714da37a4 audio: Fix audio stream callback calculations when future buffer has space.
917e036f6f MSVC has __declspec(deprecated)
279ff8909f Changed example code to avoid potential divide by zero
8a1afc9b10 Fixed Android not sending controller event timestamps
463c456b98 Fill the correct member with the joystick ID in SDL_EVENT_JOYSTICK_UPDATE_COMPLETE
55cf1abaa6 test: Don't flag testsurround as suitable for non-interactive use
a2d594269c Fixed pixel format compatibility with SDL2
79a190aa23 Fixed setting invalid bpp for FOURCC formats in SDL_GetMasksForPixelFormatEnum()
8fdebdd3e0 Sync SDL3 wiki -> header
b903ccf945 SDL_rwops read/write functions return size_t again
c03f5b4b69 Fixed rounding up in SDL_PrintFloat
75a020aa6b Only query serial number and firmware versions from Sony PS5 controllers
fa189d302e Added the Victrix Pro FS for PS4/PS5 to the controller list
26205b659d Fixed PS4/PS5 touchpad for third party controllers
6af0448af9 include: fixed a typo in SDL_RenderGetMetalCommandEncoder docs.
f3cb46b083 SDL_thread.h: do not conflict with sdl2-compat::sdl3_include_wrapper.h
080b1dfbdb Revert "Improved fallback for SDL_COMPILE_TIME_ASSERT() (thanks @icculus!)"
9d453daa23 Improved fallback for SDL_COMPILE_TIME_ASSERT() (thanks @icculus!)
1fb2419882 Removed reference to renamed function
e7d56dd0b2 audio: Renamed new API SDL_UnpauseAudioDevice to SDL_ResumeAudioDevice.
2b0c0f5b6b Don't pass NULL to strncmp
778e8185cd Fix size of memcpy in SDL_AudioDeviceFormatChangedAlreadyLocked And add diagnostic that allows to find this kind of issue in clang-tidy
4bb426abad Sync SDL3 wiki -> header
3a752ce650 Reapply "Changed 'freesrc' parameter from int to SDL_bool" to SDL_wave.c
2ba03b4db0 fix build after previous commit.
0026adffd4 apply force_align_arg_pointer attribute to correct version of SDL_RunApp
77446e2029 Unaligned stacks on i686-w64-mingw32 may lead to crashes
d3bcc3f057 Fixed build errors when OpenGL isn't enabled
35ad68e126 Sync SDL3 wiki -> header
70323a8350 Add a function to display the system menu for a window
be5f66c84e testaudio: Fixed soundboard icon, which had a colorkey issue.
c0a88930bf Sync SDL3 wiki -> header
18c59cc969 Merge the SDL3 audio subsystem redesign!
99b0e31788 The Steam Controller D-Pad is only pressed when the button is pressed down
103073d694 Set NSBluetoothAlwaysUsageDescription for testcontroller
ca02bb6c8c We don't need testdropfile-Info.plist
e063f662e9 Enable the controller update complete events
06bea1eb55 Added a gamepad mapping for the G-Shark GS-GP702
5ca3c50bf0 testaudio: Fix compiler warning.
1b1f02c5aa testaudio: Apparently compilers don't like this possibly being NULL now...?
2de9253b6c test: Added testaudio
fb3ab3f113 SDL_video.c: move ngage video before offscreen.
843572d993 Don't mark autorelease keys as virtual
648de4f9b8 Fixed duplicate key press/release events on iOS
a8abe612ed Only pass keypresses up the responder chain when text input is active
c3288d113e Synchronize on-screen keyboard state with text input active state
5fb92ef2f7 Fixed whitespace
f5ea6ae18d Revert "Stop beep when running iOS apps on ARM-based Macs"
546508b9b4 Allow test programs to run at full resolution on iPads
68a4bb01e0 Allocate displays as an array of pointers instead of an array of objects
07578fde3d Fixed crash if a display is enumerated twice
a509771a87 fix ios CI workflow after commit e4460e897f
72ce76905a The scheme isn't always the same as the framework name (e.g. xmp_lite vs xmp-lite)
e4460e897f By default Xcode expects the framework target name to be the name of the project.
ac683773dc Added missing tests to the "All" target
7dd56eaafe Removed unnecessary reference to testoverlay-Info.plist
e1c7f524ef Reduce the number of times SDL3 is duplicated in the xcframework script
65538011ca Make Xcode targets more specific
efe114c300 Revert "Renamed the xcframework target from "SDL.xcframework" to "xcframework""
73ed1d21a9 Renamed the xcframework target from "SDL.xcframework" to "xcframework"
76b4d8a0d8 Build the Framework instead of a static library for iOS and tvOS
d1bf979160 Removed unnecessary setting from the "Create DMG" target
c94cb3a5d8 Simplified the Xcode project to a single Framework target
ea60474c65 cmake: don't build SDL3-static Apple framework
8f00d7856d Sync SDL3 wiki -> header
d4a867a256 Rename SDL_GetPath to SDL_GetUserFolder
71099149b8 Fall back to Xlib if XRandR isn't available
b7f32f74ce Note the removal of the SDL_RENDERER_TARGETTEXTURE flag
0eda582160 testaudiostreamdynamicresample: Load sample.wav correctly.
87eae9a0a1 aaudio: We need a mixbuf on capture devices, too.
fb68e84646 wayland: Fix memory leaks
b0edd23c00 testsurround: Log available audio output devices at the start.
ae3090c387 androidaudio: Move Init/bootstrap code to bottom of source code.
18fc0db9e5 aaudio: Rearranged source code to match other backends.
2507c1d68b aaudio: Disconnect playing devices if error callback fires.
32a3fc3783 aaudio: Use the callback interface.
b49ce86765 audio: Fixed compiler warning on Android NDK.
1c074e8d97 android: Fixed audio device detection.
82ce05ad01 pulseaudio: Be more aggressive with hotplug thread synchronization.
5cbdf1168e androidaudio: Fixed incorrect JNI call (thanks, @madebr!)
660054f3dc include: Correct comment about audio device hotplug events.
ab68428a64 aaudio: Fixed for older SDKs and Android releases.
5ff87c6d4a android: Reworked audio backends for SDL3 audio API.
54af687210 testautomation_audio.c: Patched to compile.  :/
5e82090662 testautomation_audio.c: Apparently we aren't updating test code for C99 atm.
7f4488f625 wasapi: More fixes for Clang warnings.
29a0c689c9 wasapi: Patched to compile with Clang.
4aa95c21bc pspaudio: Patched to compile.
9a2a0a1463 ps2audio: Delete errant character that got inserted before previous commit.
2c578bd0d5 qnxaudio: Rewrite for SDL3 audio APIs.
455eef4cd9 audio: Use AtomicAdd for device counts, don't treat as a refcount.
095ea57f94 pspaudio: Patched to compile.
d7cf63db67 ps2audio: Patched to compile.
027b9e8787 coreaudio: (maybe) patched to compile on iOS.
4836c2db07 pspaudio: Patched to compile.
86ca412436 n3dsaudio: Patched to compile.
66bcee2ca9 testaudiostreamdynamicresample.c: Fixed MSVC compiler warning.
dbf993d358 vitaaudio: patched to compile.
5707e14716 audio: Fix up some things that broke when rebasing the branch against main.
6567285eae SDL_migration.cocci: Fix up SDL_(Pause|Unpause)Audio.
0b6255551e test: Fixed incorrect SDL_OpenAudioDevice call in testautomation.
107fd941cd vitaaudio: Clean up correctly in CloseDevice.
9fa4a6ef87 netbsdaudio: Minor fix.
b0d89868c6 n3dsaudio: Updated (but untested!) for SDL3 audio API.
ba27176106 vitaaudio: Untested attempt to move Vita audio to SDL3's audio API.
0b58e96d9e wasapi: Patched WinRT to compile.
d6b4f48488 visualc: Turn on multiprocessor compilation.
c58d95c343 wasapi: Reworked for new SDL3 audio API, other win32 fixes.
dc04f85646 audio: whoops, that should be an int.
be0dc630b7 audio: Fixed incorrect assertion
77b3fb06ee directsound: First shot at updating for SDL3 audio API.
4399b71715 audio: Generalize how backends can lookup an SDL_AudioDevice.
2fb122fe46 audio: backends now "find" instead of "obtain" devices by handle.
c3f5a5fc72 dummyaudio: SDL3ify style
7d65ff86e2 diskaudio: Adjusted for later SDL3 audio API redesign changes.
4ba9c2eade dummyaudio: Configurable delay, other SDL3 API fixes.
fb395d3ad7 sndio: Updated to the SDL3 audio API.
1a55282051 dsp: Some minor logic fixes
6bc85577d7 netbsdaudio: Updated for SDL3 audio API.
0f6e59312b netbsdaudio: Removed email address from source code.
51ae78c0af haikuaudio: Updated for SDL3 audio API.
fc7ed18ca1 emscriptenaudio: don't forget to finalize the audio thread
4233c41ce2 pulseaudio: Removed unnecessary variable.
a0528cd5ed emscriptenaudio: Updated for SDL3 audio API.
79cc29ba35 wave: Don't check if format->channels > INT_MAX, it's a Uint16.
1bfe97c235 pspaudio: Updated for SDL3 audio API.
121a2dce15 audio: Make sure `device->hidden` is NULL after CloseDevice
3d6ba0cafd ps2audio: Removed free of buffer that hasn't been allocated yet.
00ed6f8827 test: Fixed compiler warnings for unused vars.
6f12f68ec9 ps2audio: SDL3ified the style
4993743a02 ps2audio: Renamed `_this` to `device`
74568cdb2b ps2audio: Updated (but untested) for SDL3 audio API.
c83b68ef26 jack: renamed `_this` to `device`.
3f4f004794 audio: Remove an assertion that no longer makes sense.
86243b2589 jack: Use ProvidesOwnCallbackThread.
18906a32b8 jack: First shot at updating for SDL3 audio API.
a2b488359e dsp: Removed debug logging
6fd71185cd dsp: Updated for new SDL3 audio API.
3482d1215a alsa: Don't ever block in CaptureFromDevice.
65d296ef1a audio: Use SDL_powerof2 instead of reinventing it.
409b544505 alsa: Updated for new SDL3 audio API
0999a090a7 audio: More tweaking of `device->thread_alive`
f94ffd6092 audio: Fixed logic error
4deb2970c9 alsa: Renamed `_this` to `device`
0fb9e4baae audio: Remove no-longer-used SupportsNonPow2Samples
c653e57768 coreaudio: rewritten for SDL3 audio redesign!
533777eff5 audio: SDL_sysaudio.h comment conversion.
8473e522e0 audio: unify device thread naming.
258bc9efed audio: PlayDevice now passes the buffer, too, for convenience.
e518149d14 audio: Fixed locking in SDL_AudioDeviceDisconnected
22afa5735f audio: FreeDeviceHandle should pass the whole device, for convenience.
9e3c5f93e0 coreaudio: Change `_this` to `device`
e969160de0 audio: unset a freed variable to NULL
1fc01b0300 audio: Try to definitely have a default device set up.
b60a56d368 audio: take first reported device if no default was specified.
a8323ebe68 audio: Better handling of ProvidesOwnCallbackThread backends.
1dffb72c1d pipewire: Hooked up default device change notifications.
a93fcf2444 audio: fixed flushed stream reporting bytes but not being able to get them.
ad6c1781fc pulseaudio: Minor cleanups.
cfc8a0d17d pipewire: First shot at moving to the new SDL3 audio interfaces.
13202642a3 aaudio: Fixed capitialization, plus some minor cleanups.
3e9991b535 audio: Make sure we don't write to a NULL pointer.
943351affb pulseaudio: GetDefaultAudioInfo isn't a thing anymore.
11dfc4d737 test: Update testautomation_audio for SDL3 audio API.
29afc2e42b test: Update testresample for SDL3 audio API.
3a02eecced test: Update testsurround for SDL3 audio API.
e1c78718d4 test: testaudiocapture is updated for the SDL3 audio API.
f48cb716c2 pulseaudio: a couple minor tweaks.
dac25fe9eb audio: Seperate audio capture into Wait/Read operations.
3e10c0005d audio: Capture devices should respect logical device pausing.
7e700531c5 audio: Allow SDL_OpenAudioDevice to accept a NULL spec.
bb1cbbd33a test: Update testaudioinfo for SDL3 audio API.
883aee32c5 audio: Let default formats differ for output and capture devices.
62cf24eeb9 pulseaudio: Listen for server events in addition to sources and sinks.
924f370bd7 pulseaudio: Fix deadlock in HotplugThread.
5d4e9e5f80 test: Updated testaudiostreamdynamicresample to SDL3 audio API.
f883b9fc64 test: Updated testaudiohotplug to SDL3 audio API.
2be5f726d4 audio: Removed debug logging.
323ecce123 docs: Added migration note about SDL_AUDIODEVICEREMOVED.
47b0321ebf test: Removed loopwavequeue.c; obsolete in SDL3.
0e5a1d4f29 pulseaudio: Removed debug logging.
f598626e46 test: loopwave shouldn't use an audiostream callback.
eee407caf8 docs: migration guide note that SDL_LoadWAV has a different return type.
b03c493fc4 test: Updated testmultiaudio to new SDL3 audio API
fe1daf6fb5 audio: Mark disconnected default devices as "zombies".
cdd2ba81de audio: Fixed adding new physical devices to a double-linked list.
db39cbf208 audio: Allow SDL_GetAudioDeviceFormat() to query the default devices.
ee10bab3cd audio: An enormous amount of work on managing default devices.
c7a44eea83 audio: Fixed logic error.
089cd87cb5 audio: Make sure device count stays correct as hardware disconnects.
e50cb72eb6 docs: Note that audio opening doesn't implicitly init SDL now.
97b2f747d0 docs: Corrections to audio section of README-migration.md
464640440f audio: Added SDL_GetAudioStreamBinding.
01f7b53865 audio: Readded (logical) device pausing.
fd4c9f4e11 audio: documentation improvements.
4b78b789a7 audio: Switch SDL_audio.c and SDL_audiocvt.c to C99-ish syntax.
d96a1db7d7 audio: Opening via a logical device ID should also track default device.
b2e020958f audio: Wrap device access in opening of logical devices.
7ee2459927 audio: Check for unlikely failure case in WAV loaded.
3d65a2cefe audio: Made SDL_LoadWAV a real function, not just a macro.
26525f5fd3 audio: Readd SDL_AudioSpec, but just with format/channels/freq fields.
e6aaed7d79 include: Audio is not, and has not been, a raw mixing buffer for a long time.
56b1bc2198 audio: SDL_AudioStream now has callbacks for Get and Put operations.
905c4fff5b audio: First shot at the SDL3 audio subsystem redesign!
b221b59995 cmake: add SDL_REVISION option
0500fca00c Add missing break
d3f2de7f29 fixed typo in prev. patch.
12b35c6a46 test/testnativecocoa.m: fixed deprecation warnings.
e24b3e2fa4 cmake: rename SDL_TEST -> SDL_TEST_LIBRARY
da5016d336 cmake: use pkg-config + test compile instead of Find module for detecting rpi
deec574ff6 cmake: fix SDL_HIDAPI_LIBUSB
f2ae00c1ad Sync SDL3 wiki -> header
41a96c8133 doc: document building of SDL tests with CMake
3174d0b970 Sorted controller list
27b8abb056 Add Steam Deck controller mapping to database.
41d436f0fe Use SetWindowPos to show windows when SDL_HINT_WINDOW_ACTIVATE_WHEN_SHOWN is set to avoid activating the parent window when showing a child window
0dc85f3078 Improved the documentation for the gamepad paddle buttons
2fff999a41 Try to create the dummy mouse cursor after video backend initialization
d086d9874d Sync SDL3 wiki -> header
bce598addd SDL_pixels.c: Fixed compiler warning on Android NDK.
ad0c0d3cde Sync SDL3 wiki -> header
f8e8dff7ee tests: Fix automated window grab and positioning tests under Wayland/XWayland
4cffbc3644 Add VS code directory to gitignore
666f81bace Add more endian-specific aliases for 32 bit pixelformats
4749df0a63 Just disable the 4214 warning instead of trying to change the structure definition

git-subtree-dir: external/sdl/SDL
git-subtree-split: ec0042081ea104d5dd0ee291105210e00a4fe3d9
2023-08-12 20:17:29 +02:00
bd9bbf67d4 small fixes 2023-08-11 12:18:50 +02:00
8cab1307ab update sub 2023-08-09 11:18:41 +02:00
5bfc429450 small ui refactor + forwarding files 2023-08-06 16:07:50 +02:00
7a7b55bebf make tox save file selectable (hacky) 2023-08-03 15:03:12 +02:00
b4eda033c6 update plugin version 2023-08-03 13:11:59 +02:00
c9672bf352 add debug start screen ui + can load plugins 2023-08-03 13:05:19 +02:00
5547ff6d2b also scen media on message creation 2023-08-02 20:58:16 +02:00
ea8cc85c61 update subs for tox reconnection bug fixes 2023-08-02 20:21:06 +02:00
5ecec26731 image inlining working 2023-08-02 19:24:51 +02:00
01fddd19cd message texture cache + message image loader (no inline images yet) 2023-08-02 16:36:34 +02:00
e362af8271 cleanup texures on cache destruction 2023-08-02 15:37:37 +02:00
2dba4f8fbb add webp support 2023-08-02 15:06:19 +02:00
bdfe44399a Merge commit '67653bbe50841153bfeaebcc5792bd8464da9880' as 'external/libwebp/libwebp' 2023-08-02 14:57:22 +02:00
67653bbe50 Squashed 'external/libwebp/libwebp/' content from commit dd7364c3c
git-subtree-dir: external/libwebp/libwebp
git-subtree-split: dd7364c3cefe0f5c0b3c18c3b1887d353f90fc1f
2023-08-02 14:57:22 +02:00
a144459e8d add stb image loader (avatar png support) 2023-08-01 20:17:38 +02:00
d725ed8cd0 add stb 2023-08-01 20:13:38 +02:00
87d7eb69af Merge commit '3cd551098b8dc5a2b55d45a2a63bb9a219d31f2e' as 'external/stb/stb' 2023-08-01 20:11:22 +02:00
3cd551098b Squashed 'external/stb/stb/' content from commit c39c7023e
git-subtree-dir: external/stb/stb
git-subtree-split: c39c7023ebb833ce099750fe35509aca5662695e
2023-08-01 20:11:22 +02:00
95b77cb696 add avatar texture handling 2023-08-01 18:25:56 +02:00
2e28ad7bb9 fix missing include 2023-08-01 14:28:28 +02:00
75f78f8c7f load tox identicons 2023-08-01 13:21:16 +02:00
ef59386e5c start texture cache for contacts (avatars) 2023-07-31 20:47:22 +02:00
c0b57c30bd add sdl bmp image loader 2023-07-30 16:00:55 +02:00
42b3866753 start thinking about pasting files 2023-07-30 15:10:26 +02:00
aff239377d add README.md 2023-07-29 22:50:15 +02:00
3aaa1b0350 try satisfy macintosh 2023-07-29 22:30:36 +02:00
823a4ae189 missing include 2023-07-29 21:51:24 +02:00
40cd04f9dd fix cross platform file paths to string 2023-07-29 21:25:25 +02:00
6a979d31a0 fix missing includes 2023-07-29 20:58:29 +02:00
56f7db9ae6 msg sorting 2023-07-29 20:51:32 +02:00
d5e2dd2e1f add auto dirty (save toxfile) + minor stuff 2023-07-29 20:39:31 +02:00
93e5bb867b invites 2023-07-29 20:07:59 +02:00
4cd295065b add chat gui, probably works 2023-07-28 18:03:45 +02:00
5a9aacc603 add file selector with sorting 2023-07-27 19:34:47 +02:00
082c4febdf more setup 2023-07-26 20:09:57 +02:00
a848a01527 add tox client and more setup 2023-07-26 12:55:50 +02:00
3a1c15f313 screen concept 2023-07-26 12:24:18 +02:00
e92c7cbfa0 add exe icon 2023-07-26 02:02:06 +02:00
4d09e1fd4a gh windows cd 2023-07-26 01:13:54 +02:00
07765b4ad7 wire up imgui 2023-07-26 01:11:17 +02:00
f66b8d3ea2 Merge commit '88f609166511499341dde3b5a6191e34641d2eeb' as 'external/imgui/imgui' 2023-07-26 00:23:40 +02:00
88f6091665 Squashed 'external/imgui/imgui/' content from commit d4ddc46e7
git-subtree-dir: external/imgui/imgui
git-subtree-split: d4ddc46e7773e9a9b68f965d007968f35ca4e09a
2023-07-26 00:23:40 +02:00
a0122d38f2 minimal sdl app 2023-07-25 23:59:35 +02:00
e4d2987c54 add sdl 2023-07-25 22:47:22 +02:00
dec0d4ec41 Squashed 'external/sdl/SDL/' content from commit b8d91252c
git-subtree-dir: external/sdl/SDL
git-subtree-split: b8d91252c6488cf9c645aba447b56bb9dbd9609a
2023-07-25 22:27:55 +02:00
b8e444eaa8 Merge commit 'dec0d4ec4153bf9fc2b78ae6c2df45b6ea8dde7a' as 'external/sdl/SDL' 2023-07-25 22:27:55 +02:00
b38ea75e56 updat sol toxcore 2023-07-25 22:08:02 +02:00
8613511928 more fixing 2023-07-25 16:22:35 +02:00
7650d0ff23 update plugin sub 2023-07-25 15:38:14 +02:00
4ec6a26d91 start with gh ci 2023-07-25 15:10:52 +02:00
efa781e9ba empty main 2023-07-25 14:56:22 +02:00
41d0fc02c3 should compile 2023-07-25 14:35:57 +02:00
5568b70d31 Merge commit '227425b90e9a671118026689dd30967e127a1090' as 'external/toxcore/c-toxcore' 2023-07-25 11:53:09 +02:00
310a099f14 Merge commit '5c7231b7a33124aeadedbd8c7579376494a05a56' as 'external/entt/entt' 2023-07-25 11:29:51 +02:00
5c7231b7a3 Squashed 'external/entt/entt/' content from commit fef92113
git-subtree-dir: external/entt/entt
git-subtree-split: fef921132cae7588213d0f9bcd2fb9c8ffd8b7fc
2023-07-25 11:29:51 +02:00
df9fc529e2 add gitignore 2023-07-25 11:29:43 +02:00
f077a29cf0 partial setup 2023-07-25 01:11:30 +02:00
3567 changed files with 1100341 additions and 29446 deletions
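
Most of the larger entries above (a "Merge commit '...' as 'external/...'" commit paired with a "Squashed '...' content/changes from commit ..." message, plus the git-subtree-dir / git-subtree-split trailers) are the footprint of git subtree used with the --squash option. A minimal sketch of that workflow, assuming the upstream libsdl-org repository URL and a placeholder branch name:

    # First import: creates the squashed commit and a "Merge commit '...' as 'external/sdl/SDL'"
    # merge, recording git-subtree-dir / git-subtree-split in the commit message.
    git subtree add --prefix=external/sdl/SDL https://github.com/libsdl-org/SDL.git main --squash

    # Later updates (e.g. the "update sdl" commit): pulls upstream changes in as a new
    # squashed commit plus another merge commit.
    git subtree pull --prefix=external/sdl/SDL https://github.com/libsdl-org/SDL.git main --squash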


@@ -1,12 +0,0 @@
#!/bin/sh
set -eux
git submodule update --init --recursive
/src/workspace/tools/inject-repo c-toxcore
# TODO(iphydf): Re-enable fuzz-test when https://github.com/tweag/rules_nixpkgs/issues/442 is fixed.
cd /src/workspace && bazel test -k \
--build_tag_filters=-haskell,-fuzz-test \
--test_tag_filters=-haskell,-fuzz-test \
-- \
"$@"


@@ -1,79 +0,0 @@
---
bazel-opt_task:
container:
image: toxchat/toktok-stack:latest-release
cpu: 2
memory: 2G
configure_script:
- git submodule update --init --recursive
- /src/workspace/tools/inject-repo c-toxcore
- sed -i -e 's/build --config=remote/#&/' /src/workspace/.bazelrc.local
test_all_script:
- cd /src/workspace && bazel test -k
--build_tag_filters=-haskell
--test_tag_filters=-haskell
--
//c-toxcore/...
-//c-toxcore/auto_tests:tcp_relay_test # Cirrus doesn't allow external network connections.
bazel-dbg_task:
container:
image: toxchat/toktok-stack:latest-debug
cpu: 2
memory: 2G
configure_script:
- git submodule update --init --recursive
- /src/workspace/tools/inject-repo c-toxcore
test_all_script:
- cd /src/workspace && bazel test -k
--build_tag_filters=-haskell
--test_tag_filters=-haskell
--remote_http_cache=http://$CIRRUS_HTTP_CACHE_HOST
--
//c-toxcore/...
-//c-toxcore/auto_tests:tcp_relay_test # Cirrus doesn't allow external network connections.
cimple_task:
container:
image: toxchat/toktok-stack:latest-release
cpu: 2
memory: 4G
configure_script:
- git submodule update --init --recursive
- /src/workspace/tools/inject-repo c-toxcore
- sed -i -e 's/build --config=remote/#&/' /src/workspace/.bazelrc.local
test_all_script:
- cd /src/workspace && bazel test -k
--build_tag_filters=haskell
--test_tag_filters=haskell
--
//c-toxcore/...
freebsd_task:
freebsd_instance:
image_family: freebsd-14-0
configure_script:
- PAGER=cat ASSUME_ALWAYS_YES=YES pkg install
cmake
git
gmake
googletest
libconfig
libsodium
libvpx
opus
pkgconf
- git submodule update --init --recursive
test_all_script:
- |
# TODO(iphydf): Investigate FreeBSD failures on these tests.
sed -Ei -e '/\(dht_getnodes_api\)/s/^/#/' auto_tests/CMakeLists.txt
cmake . \
-DMIN_LOGGER_LEVEL=TRACE \
-DMUST_BUILD_TOXAV=ON \
-DNON_HERMETIC_TESTS=OFF \
-DTEST_TIMEOUT_SECONDS=50 \
-DUSE_IPV6=OFF \
-DAUTOTEST=ON
cmake --build . --target install
ctest -j50 --output-on-failure --rerun-failed --repeat until-pass:6


@@ -1,49 +0,0 @@
# vim:ft=yaml
HeaderFilterRegex: "/c-toxcore/[^/]+/[^.].+"
CheckOptions:
- key: readability-identifier-naming.ClassCase
value: Camel_Snake_Case
- key: readability-identifier-naming.ConstantCase
value: lower_case
# bootstrap-daemon has these.
- key: readability-identifier-naming.ConstantIgnoredRegexp
value: "^NAME_.*"
- key: readability-identifier-naming.EnumCase
value: Camel_Snake_Case
- key: readability-identifier-naming.EnumConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.FunctionCase
value: lower_case
- key: readability-identifier-naming.GlobalConstantCase
value: lower_case
- key: readability-identifier-naming.MemberCase
value: lower_case
- key: readability-identifier-naming.MacroDefinitionCase
value: UPPER_CASE
- key: readability-identifier-naming.MacroDefinitionIgnoredRegexp
value: "^_.*|bitwise|force|nullable|non_null|nullptr|static_assert|ck_.*"
- key: readability-identifier-naming.ParameterCase
value: lower_case
- key: readability-identifier-naming.StructCase
value: Camel_Snake_Case
- key: readability-identifier-naming.TypedefCase
value: Camel_Snake_Case
- key: readability-identifier-naming.TypedefIgnoredRegexp
value: ".*_cb"
- key: readability-identifier-naming.UnionCase
value: Camel_Snake_Case
- key: readability-identifier-naming.VariableCase
value: lower_case
- key: llvmlibc-restrict-system-libc-headers.Includes
value: "alloca.h,arpa/inet.h,assert.h,ctype.h,errno.h,fcntl.h,getopt.h,libconfig.h,limits.h,linux/if.h,math.h,netdb.h,netinet/in.h,opus.h,pthread.h,signal.h,sodium/crypto_scalarmult_curve25519.h,sodium.h,sodium/randombytes.h,stdarg.h,stdbool.h,stddef.h,stdint.h,stdio.h,stdlib.h,string.h,sys/ioctl.h,syslog.h,sys/resource.h,sys/socket.h,sys/stat.h,sys/time.h,sys/types.h,time.h,unistd.h,vpx/vp8cx.h,vpx/vp8dx.h,vpx/vpx_decoder.h,vpx/vpx_encoder.h,vpx/vpx_image.h"
- key: hicpp-signed-bitwise.IgnorePositiveIntegerLiterals
value: true
- key: concurrency-mt-unsafe.FunctionSet
value: posix
- key: readability-function-cognitive-complexity.Threshold
value: 153 # TODO(iphydf): Decrease. tox_new is the highest at the moment.
- key: cppcoreguidelines-avoid-do-while.IgnoreMacros
value: true
- key: readability-simplify-boolean-expr.SimplifyDeMorgan
value: false
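
For reference, clang-tidy picks up the nearest .clang-tidy file automatically, so a configuration like the one above is exercised by pointing clang-tidy at a compilation database. A minimal sketch (the repository's own lint driver script is not shown here, and the chosen source file is only an example):

    cmake -B _build -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
    clang-tidy -p _build toxcore/util.c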


@@ -1,11 +0,0 @@
{
"image": "mcr.microsoft.com/devcontainers/universal:2",
"features": {
"ghcr.io/devcontainers-contrib/features/bash-command:1": {
"command": "git submodule update --init --recursive"
},
"ghcr.io/devcontainers-contrib/features/apt-packages:1": {
"packages": "libconfig-dev,libopus-dev,libvpx-dev,ninja-build"
}
}
}


@@ -1,2 +0,0 @@
/_build
/_install


@@ -1,55 +0,0 @@
#!/bin/bash
# Copyright (C) 2018-2023 nurupo
# Toxcore building
set -eux
cd .. # /work
. cmake-alpine-run.sh
# === Get VM ready to build the code ===
start_vm
RUN apk add cmake g++ ninja
mv c-toxcore /
# Copy over toxcore code from host to qemu
scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -P "$SSH_PORT" -r /c-toxcore root@localhost:~
cd /c-toxcore
. ".github/scripts/flags-gcc.sh"
# Make compilation error on a warning
add_flag -Werror
# - disabling toxav because vpx doesn't work on s390x.
# - disabling bootstrap daemons because we don't need them for testing (saving time).
# - disabling shared libraries because it saves a lot of time on building PIC objects.
# - enable unity build because it saves a lot of time as well (fewer objects to build).
RUN "cmake -B_build -Hc-toxcore -GNinja \
-DCMAKE_C_FLAGS='$C_FLAGS' \
-DCMAKE_CXX_FLAGS='$CXX_FLAGS' \
-DCMAKE_EXE_LINKER_FLAGS='$LD_FLAGS' \
-DCMAKE_SHARED_LINKER_FLAGS='$LD_FLAGS' \
-DCMAKE_INSTALL_PREFIX:PATH='_install' \
-DCMAKE_UNITY_BUILD=ON \
-DMIN_LOGGER_LEVEL=TRACE \
-DNON_HERMETIC_TESTS=ON \
-DENABLE_SHARED=OFF \
-DBUILD_TOXAV=OFF \
-DDHT_BOOTSTRAP=OFF \
-DBOOTSTRAP_DAEMON=OFF \
-DSTRICT_ABI=ON \
-DTEST_TIMEOUT_SECONDS=90 \
-DUSE_IPV6=OFF \
-DAUTOTEST=ON"
RUN 'cmake --build _build --parallel 2 --target install -- -k 0'
RUN 'cd _build && ctest -j50 --output-on-failure --rerun-failed --repeat until-pass:1 --timeout 90 || true' &
# Give the tests 5 minutes to run. Sometimes, the per-test timeout doesn't
# work, so we put a global timeout here for everything.
sleep 300


@@ -1,58 +0,0 @@
#!/bin/bash
# Copyright (C) 2018-2023 nurupo
# Toxcore building
set -eux
cd .. # /work
. cmake-freebsd-run.sh
# === Get VM ready to build the code ===
# Unpack image only if it's compressed.
if [ -f "$IMAGE_NAME.gz" ]; then
gunzip "$IMAGE_NAME.gz"
fi
mv c-toxcore /
start_vm
# Copy over toxcore code from host to qemu
scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -P "$SSH_PORT" -r /c-toxcore root@localhost:~
RUN ls -lh
cd /c-toxcore
. '.github/scripts/flags-clang.sh'
add_ld_flag -Wl,-z,defs
# Make compilation error on a warning.
add_flag -Werror
# This triggers on FreeBSD's clang.
add_flag -Wno-format
add_flag -Wno-unsafe-buffer-usage
RUN "cmake -B_build -Hc-toxcore \
-DCMAKE_C_FLAGS='$C_FLAGS' \
-DCMAKE_CXX_FLAGS='$CXX_FLAGS' \
-DCMAKE_EXE_LINKER_FLAGS='$LD_FLAGS' \
-DCMAKE_SHARED_LINKER_FLAGS='$LD_FLAGS' \
-DCMAKE_INSTALL_PREFIX:PATH='_install' \
-DMIN_LOGGER_LEVEL=TRACE \
-DMUST_BUILD_TOXAV=ON \
-DNON_HERMETIC_TESTS=ON \
-DSTRICT_ABI=ON \
-DTEST_TIMEOUT_SECONDS=120 \
-DUSE_IPV6=OFF \
-DAUTOTEST=ON"
# We created the VM with the same number of cores as the host, so the host-ran `nproc` here is fine.
RUN 'cmake --build _build --parallel "$NPROC" --target install -- -k'
RUN 'cd _build && ctest -j50 --output-on-failure --rerun-failed --repeat until-pass:2 --timeout 120 || true'
# Gracefully shut down the VM.
stop_vm

.github/workflows/cd.yml

@@ -0,0 +1,43 @@
name: ContinuousDelivery
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
env:
BUILD_TYPE: Release
jobs:
windows:
timeout-minutes: 15
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Install Dependencies
run: vcpkg install libsodium:x64-windows-static pthreads:x64-windows-static
# setup vs env
- uses: ilammy/msvc-dev-cmd@v1
with:
arch: amd64
- name: Configure CMake
run: cmake -G Ninja -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -DVCPKG_TARGET_TRIPLET=x64-windows-static
- name: Build
run: cmake --build ${{github.workspace}}/build --config ${{env.BUILD_TYPE}} -j 4
- uses: actions/upload-artifact@v3
with:
name: windows_msvc_x86-64
# TODO: do propper packing
path: |
${{github.workspace}}/build/bin/


@@ -1,200 +1,74 @@
name: ci
name: ContinuousIntegration
on:
push:
branches: [ master ]
pull_request:
branches: [master]
branches: [ master ]
# Cancel old PR builds when pushing new commits.
concurrency:
group: build-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
BUILD_TYPE: Debug
jobs:
common:
uses: TokTok/ci-tools/.github/workflows/common-ci.yml@master
linux:
timeout-minutes: 10
analysis:
strategy:
fail-fast: false
matrix:
tool: [autotools, clang-tidy, compcert, cppcheck, doxygen, goblint, infer, freebsd, misra, modules, pkgsrc, rpm, slimcc, sparse, tcc, tokstyle]
runs-on: ubuntu-latest
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: docker
- name: Build toxchat/c-toxcore:sources
uses: docker/build-push-action@v5
with:
file: other/docker/sources/sources.Dockerfile
tags: toxchat/c-toxcore:sources
- name: Docker Build
uses: docker/build-push-action@v5
with:
file: other/docker/${{ matrix.tool }}/${{ matrix.tool }}.Dockerfile
coverage-linux:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Build, test, and upload coverage
run: other/docker/coverage/run
- uses: actions/checkout@v3
with:
submodules: recursive
generate-events:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Run generate_event_c
run: |
other/event_tooling/run
git diff --exit-code
- name: Install Dependencies
run: sudo apt update && sudo apt -y install libsodium-dev
cimplefmt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Run cimplefmt
run: other/docker/cimplefmt/run -u $(find tox* -name "*.[ch]")
- name: Configure CMake
run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}}
build-android:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- run: .github/scripts/cmake-android armeabi-v7a
- run: .github/scripts/cmake-android arm64-v8a
- run: .github/scripts/cmake-android x86
- run: .github/scripts/cmake-android x86_64
- name: Build
run: cmake --build ${{github.workspace}}/build --config ${{env.BUILD_TYPE}} -j 4
macos:
timeout-minutes: 10
build-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Build and test
run: .github/scripts/cmake-osx
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Install Dependencies
run: brew install libsodium
- name: Configure CMake
run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}}
- name: Build
run: cmake --build ${{github.workspace}}/build --config ${{env.BUILD_TYPE}} -j 4
windows:
timeout-minutes: 10
runs-on: windows-latest
build-windows-msvc:
strategy:
matrix:
version: [2019, 2022]
runs-on: windows-${{ matrix.version }}
env:
VCPKG_ROOT: "C:/vcpkg"
VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite"
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Export GitHub Actions cache environment variables
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Configure CMake
run: cmake --preset windows-default
- name: Build
run: cmake --build _build -j $([int]$env:NUMBER_OF_PROCESSORS+2)
- name: Test
run: |
cd _build
ctest -j50 --output-on-failure --rerun-failed --repeat until-pass:6 --build-config Debug
- uses: actions/checkout@v3
with:
submodules: recursive
build-netbsd:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Test in NetBSD
id: test
uses: vmactions/netbsd-vm@v1
with:
usesh: true
copyback: false
prepare:
/usr/sbin/pkg_add
cmake
googletest
libconfig
libopus
libsodium
libvpx
pkg-config
- name: Install Dependencies
run: vcpkg install libsodium:x64-windows-static pthreads:x64-windows-static
run: |
# TODO(iphydf): Investigate NetBSD failures on these tests.
sed -Ei -e '/\((TCP|dht_getnodes_api)\)/s/^/#/' auto_tests/CMakeLists.txt
cmake . \
-DMIN_LOGGER_LEVEL=TRACE \
-DMUST_BUILD_TOXAV=ON \
-DNON_HERMETIC_TESTS=ON \
-DTEST_TIMEOUT_SECONDS=90 \
-DUSE_IPV6=OFF \
-DAUTOTEST=ON
cmake --build . --target install
ctest -j50 --output-on-failure --rerun-failed --repeat until-pass:6
# setup vs env
- uses: ilammy/msvc-dev-cmd@v1
with:
arch: amd64
build-freebsd:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Test in FreeBSD
id: test
uses: vmactions/freebsd-vm@v1
with:
usesh: true
copyback: false
prepare:
PAGER=cat ASSUME_ALWAYS_YES=YES pkg install
cmake
git
gmake
googletest
libconfig
libsodium
libvpx
opus
pkgconf
- name: Configure CMake
run: cmake -G Ninja -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -DVCPKG_TARGET_TRIPLET=x64-windows-static
run: |
# TODO(iphydf): Investigate FreeBSD failures on these tests.
sed -Ei -e '/\(dht_getnodes_api\)/s/^/#/' auto_tests/CMakeLists.txt
cmake . \
-DMIN_LOGGER_LEVEL=TRACE \
-DMUST_BUILD_TOXAV=ON \
-DNON_HERMETIC_TESTS=ON \
-DTEST_TIMEOUT_SECONDS=50 \
-DUSE_IPV6=OFF \
-DAUTOTEST=ON
cmake --build . --target install
ctest -j50 --output-on-failure --rerun-failed --repeat until-pass:6
- name: Build
run: cmake --build ${{github.workspace}}/build --config ${{env.BUILD_TYPE}} -j 4
mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Set up Python 3.9
uses: actions/setup-python@v5
with:
python-version: 3.9
- name: Install mypy
run: pip install mypy
- name: Run mypy
run: |
(find . -name "*.py" -and -not -name "conanfile.py"; grep -lR '^#!.*python') \
| xargs -n1 -P8 mypy --strict


@@ -1,43 +0,0 @@
name: post-submit
on:
push:
branches: [master]
jobs:
build-alpine-s390x:
runs-on: ubuntu-latest
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker Build
uses: docker/build-push-action@v5
with:
file: other/docker/alpine-s390x/Dockerfile
docker-coverage:
runs-on: ubuntu-latest
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: docker
- name: Login to DockerHub
if: ${{ github.event_name == 'push' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build toxchat/c-toxcore:sources
uses: docker/build-push-action@v5
with:
file: other/docker/sources/sources.Dockerfile
tags: toxchat/c-toxcore:sources
- name: Build and push
uses: docker/build-push-action@v5
with:
file: other/docker/coverage/coverage.Dockerfile
push: ${{ github.event_name == 'push' }}
tags: toxchat/c-toxcore:coverage
cache-from: type=registry,ref=toxchat/c-toxcore:coverage
cache-to: type=inline

.gitignore

@@ -1,101 +1,22 @@
# OS files
.DS_Store
.DS_Store?
._*
.mypy_cache
.Spotlight-V100
.Trash*
Icon?
ethumbs.db
Thumbs.db
*.tmp
# Make
/_build
/_install
/tox-0.0.0*
/.vs
/CppProperties.json
CMakeCache.txt
CMakeFiles
Makefile
!/other/rpm/Makefile
cmake_install.cmake
install_manifest.txt
tags
Makefile.in
CMakeLists.txt.user
DartConfiguration.tcl
CTestTestfile.cmake
*.pc
# Testing
/amalgamation.*
testing/data
*~
# Vim
*.swp
# Object files
.vs/
*.o
*.lo
*.a
# Executables
*.exe
*.out
*.app
*.la
# Libraries
*.so
# Misc (?)
m4/*
configure
configure_aux
!m4/pkg.m4
aclocal.m4
config.h*
config.log
config.status
stamp-h1
autom4te.cache
libtool
.deps
.libs
.dirstamp
build/
*.nvim*
*.vim*
# kdevelop
.kdev/
*.kdev*
# VScode
.vscode/
# Netbeans
nbproject
# astyle
*.orig
# Android buildscript
android-toolchain-*
toxcore-android-*
# cscope files list
cscope.files
# rpm
tox.spec
# clangd
.cache/
compile_commands.json
/infer
*.swp
~*
*~
.idea/
cmake-build-debug/
cmake-build-debugandtest/
cmake-build-release/
*.stackdump
*.coredump
compile_commands.json
/build*
.clangd
.cache
.DS_Store
.AppleDouble
.LSOverride
CMakeLists.txt.user*
CMakeCache.txt

.gitmodules

@@ -1,3 +1,22 @@
[submodule "third_party/cmp"]
path = third_party/cmp
url = https://github.com/camgunz/cmp
[submodule "external/toxcore/c-toxcore/third_party/cmp"]
path = external/toxcore/c-toxcore/third_party/cmp
url = https://github.com/camgunz/cmp.git
shallow = true
[submodule "external/solanaceae_toxcore"]
path = external/solanaceae_toxcore
url = https://github.com/Green-Sky/solanaceae_toxcore.git
[submodule "external/solanaceae_util"]
path = external/solanaceae_util
url = https://github.com/Green-Sky/solanaceae_util.git
[submodule "external/solanaceae_contact"]
path = external/solanaceae_contact
url = https://github.com/Green-Sky/solanaceae_contact.git
[submodule "external/solanaceae_message3"]
path = external/solanaceae_message3
url = https://github.com/Green-Sky/solanaceae_message3.git
[submodule "external/solanaceae_tox"]
path = external/solanaceae_tox
url = https://github.com/Green-Sky/solanaceae_tox.git
[submodule "external/solanaceae_plugin"]
path = external/solanaceae_plugin
url = https://github.com/Green-Sky/solanaceae_plugin.git


@@ -1,610 +1,52 @@
################################################################################
#
# The main toxcore CMake build file.
#
# This file when processed with cmake produces:
# - A number of small libraries (.a/.so/...) containing independent components
# of toxcore. E.g. the DHT has its own library, and the system/network
# abstractions are in their own library as well. These libraries are not
# installed on `make install`. The toxav, and toxencryptsave libraries are
# also not installed.
# - A number of small programs, statically linked if possible.
# - One big library containing all of the toxcore, toxav, and toxencryptsave
# code.
#
################################################################################
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)
cmake_minimum_required(VERSION 3.16)
cmake_policy(VERSION 3.16)
project(toxcore)
# cmake setup begin
project(tomato)
list(APPEND CMAKE_MODULE_PATH ${toxcore_SOURCE_DIR}/cmake)
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
option(FLAT_OUTPUT_STRUCTURE "Whether to produce output artifacts in ${CMAKE_BINARY_DIR}/{bin,lib}" OFF)
if(FLAT_OUTPUT_STRUCTURE)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
# defaulting to debug mode, if not specified
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE "Debug")
endif()
set_source_files_properties(
toxcore/mono_time.c
toxcore/network.c
toxcore/tox.c
toxcore/util.c
PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
# setup my vim ycm :D
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
################################################################################
#
# :: Version management
#
################################################################################
# more paths
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin")
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin")
# This version is for the entire project. All libraries (core, av, ...) move in
# versions in a synchronised way.
set(PROJECT_VERSION_MAJOR "0")
set(PROJECT_VERSION_MINOR "2")
set(PROJECT_VERSION_PATCH "19")
set(PROJECT_VERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH}")
# external libs
add_subdirectory(./external) # before increasing warn levels, sad :(
# set .so library version / following libtool scheme
# https://www.gnu.org/software/libtool/manual/libtool.html#Updating-version-info
file(STRINGS ${toxcore_SOURCE_DIR}/so.version SOVERSION_CURRENT REGEX "^CURRENT=[0-9]+$")
string(SUBSTRING "${SOVERSION_CURRENT}" 8 -1 SOVERSION_CURRENT)
file(STRINGS ${toxcore_SOURCE_DIR}/so.version SOVERSION_REVISION REGEX "^REVISION=[0-9]+$")
string(SUBSTRING "${SOVERSION_REVISION}" 9 -1 SOVERSION_REVISION)
file(STRINGS ${toxcore_SOURCE_DIR}/so.version SOVERSION_AGE REGEX "^AGE=[0-9]+$")
string(SUBSTRING "${SOVERSION_AGE}" 4 -1 SOVERSION_AGE)
# account for some libtool magic, see other/version-sync script for details
math(EXPR SOVERSION_MAJOR ${SOVERSION_CURRENT}-${SOVERSION_AGE})
set(SOVERSION "${SOVERSION_MAJOR}.${SOVERSION_AGE}.${SOVERSION_REVISION}")
message("SOVERSION: ${SOVERSION}")
################################################################################
#
# :: Dependencies and configuration
#
################################################################################
include(CTest)
include(ModulePackage)
include(StrictAbi)
include(GNUInstallDirs)
if(APPLE)
include(MacRpath)
endif()
enable_testing()
find_package(GTest)
set(CMAKE_MACOSX_RPATH ON)
# Set standard version for compiler.
if(MSVC AND MSVC_TOOLSET_VERSION LESS 143)
# https://developercommunity.visualstudio.com/t/older-winsdk-headers-are-incompatible-with-zcprepr/1593479
set(CMAKE_C_STANDARD 99)
else()
set(CMAKE_C_STANDARD 11)
endif()
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_C_EXTENSIONS OFF)
set(CMAKE_CXX_EXTENSIONS OFF)
message(STATUS "Supported C compiler features = ${CMAKE_C_COMPILE_FEATURES}")
message(STATUS "Supported C++ compiler features = ${CMAKE_CXX_COMPILE_FEATURES}")
# bump up warning levels appropriately for clang, gcc & msvc
if (${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
add_compile_options(
-Wall -Wextra # Reasonable and standard
-Wpedantic # Warn if non-standard C++ is used
-Wunused # Warn on anything being unused
#-Wconversion # Warn on type conversions that may lose data
#-Wsign-conversion # Warn on sign conversions
-Wshadow # Warn if a variable declaration shadows one from a parent context
)
# Enable some warnings if we know the compiler.
if(MSVC)
add_compile_options(/W4 /analyze)
add_compile_options(/wd4100) # unreferenced formal parameter
add_compile_options(/wd4267) # narrowing conversion
add_compile_options(/wd4244) # narrowing conversion
add_compile_options(/wd4127) # conditional expression is constant
add_compile_options(/wd4995) # #pragma deprecated
add_compile_options(/wd4018) # signed/unsigned compare
add_compile_options(/wd4310) # cast truncates constant value
add_compile_options(/wd4389) # signed/unsigned compare
add_compile_options(/wd4245) # signed/unsigned assign/return/function call
add_compile_options(/wd4200) # nonstandard extension used: zero-sized array in struct/union
add_compile_options(/wd4702) # unreachable code
add_compile_options(/wd6340) # unsigned int passed to signed parameter
add_compile_options(/wd6326) # potential comparison of a constant with another constant
# TODO(iphydf): Look into these
add_compile_options(/wd4996) # use WSAAddressToStringW() instead of WSAAddressToStringA()
add_compile_options(/wd6255) # don't use alloca
add_compile_options(/wd6385) # reading invalid data
add_compile_options(/wd6001) # using uninitialized memory
add_compile_options(/wd6101) # returning uninitialized memory
add_compile_options(/wd6386) # buffer overrun
add_compile_options(/wd6011) # NULL dereference
add_compile_options(/wd6031) # sscanf return value ignored
add_compile_options(/wd6387) # passing NULL to fwrite
if (NOT WIN32)
#link_libraries(-fsanitize=address,undefined)
#link_libraries(-fsanitize=undefined)
endif()
elseif (${CMAKE_CXX_COMPILER_ID} STREQUAL "MSVC")
if (CMAKE_CXX_FLAGS MATCHES "/W[0-4]")
string(REGEX REPLACE "/W[0-4]" "/W4" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4")
endif()
endif()
set(MIN_LOGGER_LEVEL "" CACHE STRING "Logging level to use (TRACE, DEBUG, INFO, WARNING, ERROR)")
if(MIN_LOGGER_LEVEL)
if(("${MIN_LOGGER_LEVEL}" STREQUAL "TRACE") OR
("${MIN_LOGGER_LEVEL}" STREQUAL "DEBUG") OR
("${MIN_LOGGER_LEVEL}" STREQUAL "INFO") OR
("${MIN_LOGGER_LEVEL}" STREQUAL "WARNING") OR
("${MIN_LOGGER_LEVEL}" STREQUAL "ERROR"))
add_definitions(-DMIN_LOGGER_LEVEL=LOGGER_LEVEL_${MIN_LOGGER_LEVEL})
else()
message(FATAL_ERROR "Unknown value provided for MIN_LOGGER_LEVEL: \"${MIN_LOGGER_LEVEL}\", must be one of TRACE, DEBUG, INFO, WARNING or ERROR")
endif()
endif()
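# Usage sketch: configuring with e.g. "cmake -B _build -DMIN_LOGGER_LEVEL=TRACE"
# defines the C macro MIN_LOGGER_LEVEL as LOGGER_LEVEL_TRACE for the whole build.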
# cmake setup end
option(EXPERIMENTAL_API "Install experimental header file with unstable API" OFF)
add_subdirectory(./src)
option(USE_IPV6 "Use IPv6 in tests" ON)
if(NOT USE_IPV6)
add_definitions(-DUSE_IPV6=0)
endif()
option(BUILD_MISC_TESTS "Build additional tests" OFF)
option(BUILD_FUN_UTILS "Build additional just for fun utilities" OFF)
option(UNITTEST "Enable unit tests (disable if you don't have a working gmock or gtest)" ON)
option(AUTOTEST "Enable autotests (mainly for CI)" OFF)
if(AUTOTEST)
option(NON_HERMETIC_TESTS "Whether to build and run tests that depend on an internet connection" OFF)
option(PROXY_TEST "Enable proxy test (requires other/proxy/proxy_server.go to be running)" OFF)
endif()
option(BUILD_TOXAV "Whether to build the tox AV library" ON)
option(MUST_BUILD_TOXAV "Fail the build if toxav cannot be built" OFF)
option(DHT_BOOTSTRAP "Enable building of DHT_bootstrap" ON)
option(BOOTSTRAP_DAEMON "Enable building of tox-bootstrapd" ON)
if(BOOTSTRAP_DAEMON AND WIN32)
message(WARNING "Building tox-bootstrapd for Windows is not supported, disabling")
set(BOOTSTRAP_DAEMON OFF)
endif()
option(BUILD_FUZZ_TESTS "Build fuzzing harnesses" OFF)
if(MSVC)
option(MSVC_STATIC_SODIUM "Whether to link libsodium statically for MSVC" OFF)
if(MSVC_STATIC_SODIUM)
add_definitions(-DSODIUM_STATIC=1)
endif()
endif()
include(Dependencies)
if(MUST_BUILD_TOXAV)
set(NO_TOXAV_ERROR_TYPE SEND_ERROR)
else()
set(NO_TOXAV_ERROR_TYPE WARNING)
endif()
if(BUILD_TOXAV)
if(NOT OPUS_FOUND)
message(${NO_TOXAV_ERROR_TYPE} "Option BUILD_TOXAV is enabled but required library OPUS was not found.")
set(BUILD_TOXAV OFF)
endif()
if(NOT VPX_FOUND)
message(${NO_TOXAV_ERROR_TYPE} "Option BUILD_TOXAV is enabled but required library VPX was not found.")
set(BUILD_TOXAV OFF)
endif()
endif()
# Disable float/double packing in CMP (C MessagePack library).
# We don't transfer floats over the network, so we disable this functionality.
add_definitions(-DCMP_NO_FLOAT=1)
################################################################################
#
# :: Tox Core Library
#
################################################################################
# toxcore_PKGCONFIG_LIBS is what's added to the Libs: line in toxcore.pc. It
# needs to contain all the libraries a program using toxcore should link against
# if it's statically linked. If it's dynamically linked, there is no need to
# explicitly link against all the dependencies, but it doesn't harm much(*)
# either.
#
# (*) It allows client code to use symbols from our dependencies without
# explicitly linking against them.
set(toxcore_PKGCONFIG_LIBS)
# Requires: in pkg-config file.
set(toxcore_PKGCONFIG_REQUIRES)
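# Illustrative only (exact contents depend on the platform and on which optional
# dependencies are found): with the additions below, the generated toxcore.pc
# could end up containing, for example,
#   Requires: libsodium opus vpx
#   Libs: -L${libdir} -ltoxcore -lpthread -lrt
# where -lpthread / -lrt come from the system-dependency checks further down.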
set(toxcore_SOURCES
third_party/cmp/cmp.c
third_party/cmp/cmp.h
toxcore/announce.c
toxcore/announce.h
toxcore/bin_pack.c
toxcore/bin_pack.h
toxcore/bin_unpack.c
toxcore/bin_unpack.h
toxcore/ccompat.c
toxcore/ccompat.h
toxcore/crypto_core.c
toxcore/crypto_core.h
toxcore/crypto_core_pack.c
toxcore/crypto_core_pack.h
toxcore/DHT.c
toxcore/DHT.h
toxcore/events/conference_connected.c
toxcore/events/conference_invite.c
toxcore/events/conference_message.c
toxcore/events/conference_peer_list_changed.c
toxcore/events/conference_peer_name.c
toxcore/events/conference_title.c
toxcore/events/dht_get_nodes_response.c
toxcore/events/events_alloc.c
toxcore/events/events_alloc.h
toxcore/events/file_chunk_request.c
toxcore/events/file_recv.c
toxcore/events/file_recv_chunk.c
toxcore/events/file_recv_control.c
toxcore/events/friend_connection_status.c
toxcore/events/friend_lossless_packet.c
toxcore/events/friend_lossy_packet.c
toxcore/events/friend_message.c
toxcore/events/friend_name.c
toxcore/events/friend_read_receipt.c
toxcore/events/friend_request.c
toxcore/events/friend_status.c
toxcore/events/friend_status_message.c
toxcore/events/friend_typing.c
toxcore/events/self_connection_status.c
toxcore/events/group_custom_packet.c
toxcore/events/group_custom_private_packet.c
toxcore/events/group_invite.c
toxcore/events/group_join_fail.c
toxcore/events/group_message.c
toxcore/events/group_moderation.c
toxcore/events/group_password.c
toxcore/events/group_peer_exit.c
toxcore/events/group_peer_join.c
toxcore/events/group_peer_limit.c
toxcore/events/group_peer_name.c
toxcore/events/group_peer_status.c
toxcore/events/group_privacy_state.c
toxcore/events/group_private_message.c
toxcore/events/group_self_join.c
toxcore/events/group_topic.c
toxcore/events/group_topic_lock.c
toxcore/events/group_voice_state.c
toxcore/forwarding.c
toxcore/forwarding.h
toxcore/friend_connection.c
toxcore/friend_connection.h
toxcore/friend_requests.c
toxcore/friend_requests.h
toxcore/group.c
toxcore/group_chats.c
toxcore/group_chats.h
toxcore/group_common.h
toxcore/group_connection.c
toxcore/group_connection.h
toxcore/group.h
toxcore/group_announce.c
toxcore/group_announce.h
toxcore/group_moderation.c
toxcore/group_moderation.h
toxcore/group_onion_announce.c
toxcore/group_onion_announce.h
toxcore/group_pack.c
toxcore/group_pack.h
toxcore/LAN_discovery.c
toxcore/LAN_discovery.h
toxcore/list.c
toxcore/list.h
toxcore/logger.c
toxcore/logger.h
toxcore/Messenger.c
toxcore/Messenger.h
toxcore/mem.c
toxcore/mem.h
toxcore/mono_time.c
toxcore/mono_time.h
toxcore/net_crypto.c
toxcore/net_crypto.h
toxcore/network.c
toxcore/network.h
toxcore/onion_announce.c
toxcore/onion_announce.h
toxcore/onion.c
toxcore/onion_client.c
toxcore/onion_client.h
toxcore/onion.h
toxcore/ping_array.c
toxcore/ping_array.h
toxcore/ping.c
toxcore/ping.h
toxcore/shared_key_cache.c
toxcore/shared_key_cache.h
toxcore/state.c
toxcore/state.h
toxcore/TCP_client.c
toxcore/TCP_client.h
toxcore/TCP_common.c
toxcore/TCP_common.h
toxcore/TCP_connection.c
toxcore/TCP_connection.h
toxcore/TCP_server.c
toxcore/TCP_server.h
toxcore/timed_auth.c
toxcore/timed_auth.h
toxcore/tox_api.c
toxcore/tox.c
toxcore/tox_dispatch.c
toxcore/tox_dispatch.h
toxcore/tox_event.c
toxcore/tox_event.h
toxcore/tox_events.c
toxcore/tox_events.h
toxcore/tox.h
toxcore/tox_private.c
toxcore/tox_private.h
toxcore/tox_pack.c
toxcore/tox_pack.h
toxcore/tox_unpack.c
toxcore/tox_unpack.h
toxcore/util.c
toxcore/util.h)
if(TARGET unofficial-sodium::sodium)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} unofficial-sodium::sodium)
else()
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} ${LIBSODIUM_LIBRARIES})
set(toxcore_LINK_DIRECTORIES ${toxcore_LINK_DIRECTORIES} ${LIBSODIUM_LIBRARY_DIRS})
set(toxcore_INCLUDE_DIRECTORIES ${toxcore_INCLUDE_DIRECTORIES} ${LIBSODIUM_INCLUDE_DIRS})
set(toxcore_COMPILE_OPTIONS ${toxcore_COMPILE_OPTIONS} ${LIBSODIUM_CFLAGS_OTHER})
endif()
set(toxcore_PKGCONFIG_REQUIRES ${toxcore_PKGCONFIG_REQUIRES} libsodium)
set(toxcore_API_HEADERS
${toxcore_SOURCE_DIR}/toxcore/tox.h^tox
${toxcore_SOURCE_DIR}/toxcore/tox_events.h^tox
${toxcore_SOURCE_DIR}/toxcore/tox_dispatch.h^tox)
if(EXPERIMENTAL_API)
set(toxcore_API_HEADERS ${toxcore_API_HEADERS}
${toxcore_SOURCE_DIR}/toxcore/tox_private.h^tox)
endif()
################################################################################
#
# :: Audio/Video Library
#
################################################################################
if(BUILD_TOXAV)
set(toxcore_SOURCES ${toxcore_SOURCES}
toxav/audio.c
toxav/audio.h
toxav/bwcontroller.c
toxav/bwcontroller.h
toxav/groupav.c
toxav/groupav.h
toxav/msi.c
toxav/msi.h
toxav/ring_buffer.c
toxav/ring_buffer.h
toxav/rtp.c
toxav/rtp.h
toxav/toxav.c
toxav/toxav.h
toxav/toxav_old.c
toxav/video.c
toxav/video.h)
set(toxcore_API_HEADERS ${toxcore_API_HEADERS}
${toxcore_SOURCE_DIR}/toxav/toxav.h^toxav)
if(MSVC)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} PkgConfig::OPUS PkgConfig::VPX)
else()
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} ${OPUS_LIBRARIES} ${VPX_LIBRARIES})
set(toxcore_LINK_DIRECTORIES ${toxcore_LINK_DIRECTORIES} ${OPUS_LIBRARY_DIRS} ${VPX_LIBRARY_DIRS})
set(toxcore_INCLUDE_DIRECTORIES ${toxcore_INCLUDE_DIRECTORIES} ${OPUS_INCLUDE_DIRS} ${VPX_INCLUDE_DIRS})
set(toxcore_COMPILE_OPTIONS ${toxcore_COMPILE_OPTIONS} ${OPUS_CFLAGS_OTHER} ${VPX_CFLAGS_OTHER})
endif()
set(toxcore_PKGCONFIG_REQUIRES ${toxcore_PKGCONFIG_REQUIRES} opus vpx)
endif()
################################################################################
#
# :: Block encryption libraries
#
################################################################################
set(toxcore_SOURCES ${toxcore_SOURCES}
toxencryptsave/toxencryptsave.c
toxencryptsave/toxencryptsave.h)
set(toxcore_API_HEADERS ${toxcore_API_HEADERS}
${toxcore_SOURCE_DIR}/toxencryptsave/toxencryptsave.h^tox)
################################################################################
#
# :: System dependencies
#
################################################################################
# These need to come after other dependencies, since e.g. libvpx may depend on
# pthread, but doesn't list it in VPX_LIBRARIES. We're adding it here, after
# any potential libvpx linking.
message("CMAKE_THREAD_LIBS_INIT: ${CMAKE_THREAD_LIBS_INIT}")
if(CMAKE_THREAD_LIBS_INIT)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
set(toxcore_PKGCONFIG_LIBS ${toxcore_PKGCONFIG_LIBS} ${CMAKE_THREAD_LIBS_INIT})
endif()
if(NSL_LIBRARIES)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} ${NSL_LIBRARIES})
set(toxcore_PKGCONFIG_LIBS ${toxcore_PKGCONFIG_LIBS} -lnsl)
endif()
if(RT_LIBRARIES)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} ${RT_LIBRARIES})
set(toxcore_PKGCONFIG_LIBS ${toxcore_PKGCONFIG_LIBS} -lrt)
endif()
if(SOCKET_LIBRARIES)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} ${SOCKET_LIBRARIES})
set(toxcore_PKGCONFIG_LIBS ${toxcore_PKGCONFIG_LIBS} -lsocket)
endif()
if(TARGET PThreads4W::PThreads4W)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} PThreads4W::PThreads4W)
elseif(TARGET Threads::Threads)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} Threads::Threads)
endif()
if(WIN32)
set(toxcore_LINK_LIBRARIES ${toxcore_LINK_LIBRARIES} iphlpapi ws2_32)
endif()
################################################################################
#
# :: All layers together in one library for ease of use
#
################################################################################
# Create combined library from all the sources.
if(ENABLE_SHARED)
add_library(toxcore_shared SHARED ${toxcore_SOURCES})
set_target_properties(toxcore_shared PROPERTIES OUTPUT_NAME toxcore)
target_link_libraries(toxcore_shared PRIVATE ${toxcore_LINK_LIBRARIES})
target_link_directories(toxcore_shared PUBLIC ${toxcore_LINK_DIRECTORIES})
target_include_directories(toxcore_shared SYSTEM PRIVATE ${toxcore_INCLUDE_DIRECTORIES})
target_compile_options(toxcore_shared PRIVATE ${toxcore_COMPILE_OPTIONS})
endif()
if(ENABLE_STATIC)
add_library(toxcore_static STATIC ${toxcore_SOURCES})
if(NOT MSVC)
set_target_properties(toxcore_static PROPERTIES OUTPUT_NAME toxcore)
endif()
target_link_libraries(toxcore_static PRIVATE ${toxcore_LINK_LIBRARIES})
target_link_directories(toxcore_static PUBLIC ${toxcore_LINK_DIRECTORIES})
target_include_directories(toxcore_static SYSTEM PRIVATE ${toxcore_INCLUDE_DIRECTORIES})
target_compile_options(toxcore_static PRIVATE ${toxcore_COMPILE_OPTIONS})
endif()
if(BUILD_FUZZ_TESTS)
add_library(toxcore_fuzz STATIC ${toxcore_SOURCES})
target_link_libraries(toxcore_fuzz PRIVATE ${toxcore_LINK_LIBRARIES})
target_link_directories(toxcore_fuzz PUBLIC ${toxcore_LINK_DIRECTORIES})
target_include_directories(toxcore_fuzz SYSTEM PRIVATE ${toxcore_INCLUDE_DIRECTORIES})
target_compile_options(toxcore_fuzz PRIVATE ${toxcore_COMPILE_OPTIONS})
target_compile_definitions(toxcore_fuzz PUBLIC "FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION")
endif()
# Make version script (on systems that support it) to limit symbol visibility.
make_version_script(toxcore ${toxcore_API_HEADERS})
# Generate pkg-config file, install library to "${CMAKE_INSTALL_LIBDIR}" and install headers to
# "${CMAKE_INSTALL_INCLUDEDIR}/tox".
install_module(toxcore DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/tox)
################################################################################
#
# :: Unit tests: no networking, just pure function calls.
#
################################################################################
add_library(test_util STATIC
toxcore/DHT_test_util.cc
toxcore/DHT_test_util.hh
toxcore/crypto_core_test_util.cc
toxcore/crypto_core_test_util.hh
toxcore/mem_test_util.cc
toxcore/mem_test_util.hh
toxcore/network_test_util.cc
toxcore/network_test_util.hh
toxcore/test_util.cc
toxcore/test_util.hh)
function(unit_test subdir target)
add_executable(unit_${target}_test ${subdir}/${target}_test.cc)
target_link_libraries(unit_${target}_test PRIVATE test_util)
if(TARGET toxcore_static)
target_link_libraries(unit_${target}_test PRIVATE toxcore_static)
else()
target_link_libraries(unit_${target}_test PRIVATE toxcore_shared)
endif()
target_link_libraries(unit_${target}_test PRIVATE GTest::gtest GTest::gtest_main GTest::gmock)
set_target_properties(unit_${target}_test PROPERTIES COMPILE_FLAGS "${TEST_CXX_FLAGS}")
add_test(NAME ${target} COMMAND ${CROSSCOMPILING_EMULATOR} unit_${target}_test)
set_property(TEST ${target} PROPERTY ENVIRONMENT "LLVM_PROFILE_FILE=${target}.profraw")
endfunction()
# The actual unit tests follow.
#
if(UNITTEST AND TARGET GTest::gtest AND TARGET GTest::gmock)
unit_test(toxav ring_buffer)
unit_test(toxav rtp)
unit_test(toxcore DHT)
unit_test(toxcore bin_pack)
unit_test(toxcore crypto_core)
unit_test(toxcore group_announce)
unit_test(toxcore group_moderation)
unit_test(toxcore list)
unit_test(toxcore mem)
unit_test(toxcore mono_time)
unit_test(toxcore ping_array)
unit_test(toxcore test_util)
unit_test(toxcore tox)
unit_test(toxcore util)
endif()
add_subdirectory(testing)
################################################################################
#
# :: Automated regression tests: create a tox network and run integration tests
#
################################################################################
if(AUTOTEST)
add_subdirectory(auto_tests)
endif()
################################################################################
#
# :: Bootstrap daemon
#
################################################################################
if(DHT_BOOTSTRAP)
add_executable(DHT_bootstrap
other/DHT_bootstrap.c
other/bootstrap_node_packets.c)
if(TARGET toxcore_static)
target_link_libraries(DHT_bootstrap PRIVATE toxcore_static)
else()
target_link_libraries(DHT_bootstrap PRIVATE toxcore_shared)
endif()
target_link_libraries(DHT_bootstrap PRIVATE misc_tools)
if(TARGET unofficial-sodium::sodium)
target_link_libraries(DHT_bootstrap PRIVATE unofficial-sodium::sodium)
endif()
if(TARGET PThreads4W::PThreads4W)
target_link_libraries(DHT_bootstrap PRIVATE PThreads4W::PThreads4W)
elseif(TARGET Threads::Threads)
target_link_libraries(DHT_bootstrap PRIVATE Threads::Threads)
endif()
install(TARGETS DHT_bootstrap RUNTIME DESTINATION bin)
endif()
if(BOOTSTRAP_DAEMON)
if(LIBCONFIG_FOUND)
add_subdirectory(other/bootstrap_daemon)
else()
message(WARNING "Option BOOTSTRAP_DAEMON is enabled but required library LIBCONFIG was not found.")
set(BOOTSTRAP_DAEMON OFF)
endif()
endif()
if(BUILD_FUN_UTILS)
add_subdirectory(other/fun)
endif()
if (BUILD_FUZZ_TESTS)
add_subdirectory(testing/fuzzing)
endif()


@ -1,21 +0,0 @@
{
"version": 3,
"configurePresets": [
{
"name": "windows-default",
"binaryDir": "${sourceDir}/_build",
"cacheVariables": {
"ENABLE_SHARED": true,
"ENABLE_STATIC": true,
"AUTOTEST": true,
"BUILD_MISC_TESTS": true,
"BOOTSTRAP_DAEMON": false,
"MUST_BUILD_TOXAV": true,
"TEST_TIMEOUT_SECONDS": "60",
"CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS": true,
"CMAKE_COMPILE_WARNING_AS_ERROR": true,
"CMAKE_TOOLCHAIN_FILE": "$env{VCPKG_ROOT}/scripts/buildsystems/vcpkg.cmake"
}
}
]
}


@ -1,455 +0,0 @@
# Installation instructions
These instructions will guide you through the process of building and installing
the toxcore library and its components, as well as obtaining pre-built
binaries.
## Table of contents
- [Overview](#overview)
- [Components](#components)
- [Main](#main)
- [Secondary](#secondary)
- [Building](#building)
- [Requirements](#requirements)
- [Library dependencies](#library-dependencies)
- [Compiler requirements](#compiler-requirements)
- [Build system requirements](#build-system-requirements)
- [CMake options](#cmake-options)
- [Build process](#build-process)
- [Unix-like](#unix-like)
- [Windows](#windows)
- [Building on Windows host](#building-on-windows-host)
- [Microsoft Visual Studio's Developer Command Prompt](#microsoft-visual-studios-developer-command-prompt)
- [MSYS/Cygwin](#msyscygwin)
- [Cross-compiling from Linux](#cross-compiling-from-linux)
- [Pre-built binaries](#pre-built-binaries)
- [Linux](#linux)
## Overview
### Components
#### Main
This repository, although called `toxcore`, in fact contains several libraries
besides `toxcore` which complement it, as well as several executables. However,
note that although these are separate libraries, at the moment they are all
merged into a single `toxcore` library when built. Here is the full list of the
main components that can be built using CMake, their dependencies and
descriptions.
| Name | Type | Dependencies | Platform | Description |
| ---------------- | ---------- | ---------------------------------- | -------------- | -------------------------------------------------------------------------------------------------------------------------- |
| `toxcore` | Library | libsodium, libm, libpthread, librt | Cross-platform | The main Tox library that provides the messenger functionality. |
| `toxav` | Library | libtoxcore, libopus, libvpx | Cross-platform | Provides audio/video functionality. |
| `toxencryptsave` | Library | libtoxcore, libsodium | Cross-platform | Provides encryption of Tox profiles (savedata), as well as arbitrary data. |
| `DHT_bootstrap` | Executable | libtoxcore | Cross-platform | A simple DHT bootstrap node. |
| `tox-bootstrapd` | Executable | libtoxcore, libconfig | Unix-like | Highly configurable DHT bootstrap node daemon (systemd, SysVinit, Docker). |
| `cmp` | Library | | Cross-platform | C implementation of the MessagePack serialization format. [https://github.com/camgunz/cmp](https://github.com/camgunz/cmp) |
#### Secondary
There are some programs that are not built by default which you might find
interesting. You need to pass `-DBUILD_FUN_UTILS=ON` to cmake to build them.
##### Vanity key generators
Can be used to generate vanity Tox Ids or DHT bootstrap node public keys.
| Name | Type | Dependencies | Platform | Description |
| ---------------- | ---------- | ----------------- | -------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `cracker` | Executable | libsodium, OpenMP | Cross-platform | Tries to find a curve25519 key pair, hex representation of the public key of which starts with a specified byte sequence. Multi-threaded. |
| `cracker_simple` | Executable | libsodium | Cross-platform | Tries to find a curve25519 key pair, hex representation of the public key of which starts with a specified byte sequence. Single-threaded. |
| `strkey` | Executable | libsodium | Cross-platform | Tries to find a curve25519 key pair, hex representation of the public key of which contains a specified byte sequence at a specified or any position at all. Single-threaded. |
##### Key file generators
Useful for generating Tox profiles from the output of the vanity key generators,
as well as generating random Tox profiles.
| Name | Type | Dependencies | Platform | Description |
| ------------------------- | ---------- | --------------------- | -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `make-funny-savefile` | Script | python | Cross-platform | Generates a Tox profile file (savedata file) with the provided key pair. |
| `create_bootstrap_keys` | Executable | libsodium | Cross-platform | Generates a keys file for tox-bootstrapd with either the provided or a random key pair. |
| `create_minimal_savedata` | Executable | libsodium | Cross-platform | Generates a minimal Tox profile file (savedata file) with either the provided or a random key pair, printing the generated Tox Id and secret & public key information. |
| `create_savedata` | Executable | libsodium, libtoxcore | Cross-platform | Generates a Tox profile file (savedata file) with either the provided or a random key pair using libtoxcore, printing the generated Tox Id and secret & public key information. |
| `save-generator` | Executable | libtoxcore | Cross-platform | Generates a Tox profile file (savedata file) with a random key pair using libtoxcore, setting the specified user name, going online and adding specified Tox Ids as friends. |
##### Other
| Name | Type | Dependencies | Platform | Description |
| --------------------- | ---------- | ------------ | -------------- | ----------------------------------------------------------------------------------------------------------------------------------------- |
| `bootstrap_node_info` | Script | python3 | Cross-platform | Prints version and Message Of The Day (MOTD) information of the specified DHT bootstrap node, given the node doesn't have those disabled. |
| `sign` | Executable | libsodium | Cross-platform | Signs a file with a ed25519 key. |
## Building
### Requirements
#### Library dependencies
Library dependencies are listed in the [components](#components) table. The
dependencies need to be satisfied for the components to be built. Note that if
a dependency for some component is missing, e.g. `libopus`, which is required
for building the `toxav` component, building of that component is silently
disabled.
Be advised that due to the addition of `cmp` as a submodule, you now also need
to initialize the git submodules required by toxcore. This can be done by
cloning the repo with the addition of `--recurse-submodules` or by running
`git submodule update --init` in the root directory of the repo.
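For example, either of the following works (the clone URL is just the usual upstream; substitute your own fork or mirror if needed):
```sh
# Clone with submodules in one go...
git clone --recurse-submodules https://github.com/TokTok/c-toxcore

# ...or initialize them in an existing checkout.
cd c-toxcore
git submodule update --init
```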
#### Compiler requirements
The supported compilers are GCC, Clang and MinGW.
In theory, any compiler that fully supports C99 and accepts GCC flags should
work.
There is partial, experimental support for the Microsoft Visual C++ compiler. We
welcome any patches that help improve it.
You should have a C99 compatible compiler in order to build the main components.
The secondary components might require the compiler to support GNU extensions.
#### Build system requirements
To build the main components you need to have CMake version 2.8.6 or later
installed. You also need to have pkg-config installed; the build system uses it
to find dependency libraries.
There is some experimental accommodation for building natively on Windows, i.e.
without having to use MSYS/Cygwin and pkg-config, but it uses hardcoded paths
for finding libraries and supports building only some of the toxcore
components, so your mileage may vary.
### CMake options
There are some options that are available to configure the build.
| Name | Description | Expected Value | Default Value |
| ----------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------- |
| `AUTOTEST` | Enable autotests (mainly for CI). | ON or OFF | OFF |
| `BOOTSTRAP_DAEMON` | Enable building of tox-bootstrapd, the DHT bootstrap node daemon. For Unix-like systems only. | ON or OFF | ON |
| `BUILD_FUN_UTILS` | Build additional just for fun utilities. | ON or OFF | OFF |
| `BUILD_FUZZ_TESTS` | Build fuzzing harnesses. | ON or OFF | OFF |
| `BUILD_MISC_TESTS` | Build additional tests. | ON or OFF | OFF |
| `BUILD_TOXAV` | Whether to build the toxav library. | ON or OFF | ON |
| `CMAKE_BUILD_TYPE` | Specifies the build type on single-configuration generators (e.g. make or ninja). | Debug, Release, RelWithDebInfo, MinSizeRel | Empty string. |
| `CMAKE_INSTALL_PREFIX` | Path to where everything should be installed. | Directory path. | Platform-dependent. Refer to CMake documentation. |
| `DHT_BOOTSTRAP` | Enable building of `DHT_bootstrap`. | ON or OFF | ON |
| `ENABLE_SHARED` | Build shared (dynamic) libraries for all modules. | ON or OFF | ON |
| `ENABLE_STATIC` | Build static libraries for all modules. | ON or OFF | ON |
| `EXPERIMENTAL_API` | Install experimental header file with unstable API. | ON or OFF | OFF |
| `FLAT_OUTPUT_STRUCTURE` | Whether to produce output artifacts in {bin,lib}. | ON or OFF | OFF |
| `FULLY_STATIC` | Build fully static executables. | ON or OFF | OFF |
| `MIN_LOGGER_LEVEL` | Logging level to use. | TRACE, DEBUG, INFO, WARNING, ERROR or nothing (empty string) for default. | Empty string. |
| `MSVC_STATIC_SODIUM` | Whether to link libsodium statically for MSVC. | ON or OFF | OFF |
| `MUST_BUILD_TOXAV` | Fail the build if toxav cannot be built. | ON or OFF | OFF |
| `NON_HERMETIC_TESTS` | Whether to build and run tests that depend on an internet connection. | ON or OFF | OFF |
| `PROXY_TEST` | Enable proxy test (requires `other/proxy/proxy_server.go` to be running). | ON or OFF | OFF |
| `STRICT_ABI` | Enforce strict ABI export in dynamic libraries. | ON or OFF | OFF |
| `TEST_TIMEOUT_SECONDS` | Limit runtime of each test to the number of seconds specified. | Positive number or nothing (empty string). | Empty string. |
| `UNITTEST` | Enable unit tests (disable if you don't have a working gmock or gtest). | ON or OFF | ON |
| `USE_IPV6` | Use IPv6 in tests. | ON or OFF | ON |
You can get this list of options using the following commands:
```sh
cmake -B _build -LAH
```
or
```sh
grep "option(" CMakeLists.txt cmake/*
grep "set(.* CACHE" CMakeLists.txt cmake/*
```
Note that some options might be considered only if other options are enabled.
Example of calling cmake with options
```sh
cmake \
-D ENABLE_STATIC=OFF \
-D ENABLE_SHARED=ON \
-D CMAKE_INSTALL_PREFIX="${PWD}/prefix" \
-D CMAKE_BUILD_TYPE=Release \
-D TEST_TIMEOUT_SECONDS=120 \
..
```
### Building tests
In addition to the integration tests ("autotests") and miscellaneous tests
enabled by cmake variables described above, there are unit tests which will be
built if the source distribution of gtest (the Google Unit Test framework) is
found by cmake in `c-toxcore/third_party`. This can be achieved by running
`git clone https://github.com/google/googletest` from that directory.
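For example:
```sh
cd c-toxcore/third_party
git clone https://github.com/google/googletest
```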
### Build process
#### Unix-like
Assuming all the [requirements](#requirements) are met, just run
```sh
mkdir _build
cd _build
cmake ..
make
make install
```
or shorter
```sh
cmake -B _build
cmake --build _build --target install
```
#### Windows
##### Building on Windows host
###### Microsoft Visual Studio's Developer Command Prompt
In addition to meeting the [requirements](#requirements), you need a version of
Visual Studio (the
[community edition](https://www.visualstudio.com/vs/visual-studio-express/) is
enough) and a CMake version that's compatible with the Visual Studio version
you're using.
You must also ensure that the MSVC versions of the dependencies you're using are
placed in the correct folders:
for libsodium that is `c-toxcore/third_party/libsodium`, and for pthreads-w32
it's `c-toxcore/third_party/pthreads-win32`.
Once all of this is done, from the **Developer Command Prompt for VS**, simply
run
```
mkdir _build
cd _build
cmake ..
msbuild ALL_BUILD.vcxproj
```
###### MSYS/Cygwin
Download Cygwin
([32-bit](https://cygwin.com/setup-x86.exe)/[64-bit](https://cygwin.com/setup-x86_64.exe)).
Search for and select exactly these packages in the Devel category:
- mingw64-i686-gcc-core (32-bit) / mingw64-x86_64-gcc-core (64-bit)
- mingw64-i686-gcc-g++ (32-bit) / mingw64-x86_64-gcc-g++ (64-bit)
- make
- cmake
- libtool
- autoconf
- automake
- tree
- curl
- perl
- yasm
- pkg-config
To handle Windows EOL correctly run the following in the Cygwin Terminal:
```sh
echo '
export SHELLOPTS
set -o igncr
' > ~/.bash_profile
```
Download the toxcore source code and extract it to a folder.
Open a Cygwin Terminal in the toxcore folder and run
`./other/windows_build_script_toxcore.sh` to start the build process.
Toxcore build result files will appear in `/root/prefix/` relative to the Cygwin
folder (default `C:\cygwin64`).
Dependency versions can be customized in
`./other/windows_build_script_toxcore.sh` and are described in the section below.
##### Cross-compiling from Linux
These cross-compilation instructions were tested on and written for 64-bit
Ubuntu 16.04. You could generalize them for any Linux system; the only
requirements are that you have Docker >= 1.9.0 and that you are running a
64-bit system.
The cross-compilation is fully automated by a parameterized
[Dockerfile](/other/docker/windows/Dockerfile).
Install Docker
```sh
apt-get update
apt-get install docker.io
```
Get the toxcore source code and navigate to `other/docker/windows`.
Build the container image based on the Dockerfile. The following options are
available to customize the building of the container image.
| Name | Description | Expected Value | Default Value |
| -------------------------- | ----------------------------------------------------- | ----------------------------------- | ------------- |
| `SUPPORT_ARCH_i686` | Support building 32-bit toxcore. | "true" or "false" (case sensitive). | true |
| `SUPPORT_ARCH_x86_64` | Support building 64-bit toxcore. | "true" or "false" (case sensitive). | true |
| `SUPPORT_TEST` | Support running toxcore automated tests. | "true" or "false" (case sensitive). | false |
| `VERSION_OPUS` | Version of libopus to build toxcore with. | Numeric version number. | 1.4 |
| `VERSION_SODIUM` | Version of libsodium to build toxcore with. | Numeric version number. | 1.0.19 |
| `VERSION_VPX` | Version of libvpx to build toxcore with. | Numeric version number. | 1.14.0 |
| `ENABLE_HASH_VERIFICATION` | Verify the hashes of the default dependency versions. | "true" or "false" (case sensitive). | true |
Example of building a container image with options
```sh
cd other/docker/windows
docker build \
--build-arg SUPPORT_TEST=true \
-t toxcore \
.
```
Run the container to build toxcore. The following options are available to
customize the running of the container image.
| Name | Description | Expected Value | Default Value |
| -------------------- | ------------------------------------------------------------------------------------------ | ----------------------------------- | ------------------------------------------ |
| `ALLOW_TEST_FAILURE` | Don't stop if a test suite fails. | "true" or "false" (case sensitive). | `false` |
| `ENABLE_ARCH_i686` | Build 32-bit toxcore. The image should have been built with `SUPPORT_ARCH_i686` enabled. | "true" or "false" (case sensitive). | `true` |
| `ENABLE_ARCH_x86_64` | Build 64-bit toxcore. The image should have been built with `SUPPORT_ARCH_x86_64` enabled. | "true" or "false" (case sensitive). | `true` |
| `ENABLE_TEST` | Run the test suite. The image should have been built with `SUPPORT_TEST` enabled. | "true" or "false" (case sensitive). | `false` |
| `EXTRA_CMAKE_FLAGS` | Extra arguments to pass to the CMake command when building toxcore. | CMake options. | `-DTEST_TIMEOUT_SECONDS=90 -DUSE_IPV6=OFF` |
Example of running the container with options
```sh
docker run \
-e ENABLE_TEST=true \
-e ALLOW_TEST_FAILURE=true \
-v /path/to/toxcore/sourcecode:/toxcore \
-v /path/to/where/output/build/result:/prefix \
-t \
--rm \
toxcore
```
After the build succeeds, you should see the built toxcore libraries in
`/path/to/where/output/build/result`.
The file structure should look similar to the following
```
result
├── [4.0K] i686
│   ├── [ 36K] bin
│   │   ├── [636K] DHT_bootstrap.exe
│   │   ├── [572K] cracker.exe
│   │   ├── [359K] cracker_simple.exe
│   │   ├── [378K] create_bootstrap_keys.exe
│   │   ├── [378K] create_minimal_savedata.exe
│   │   ├── [958K] create_savedata.exe
│   │   ├── [ 18K] libtoxcore.def
│   │   ├── [2.6M] libtoxcore.dll
│   │   ├── [ 65K] libtoxcore.exp
│   │   ├── [428K] libtoxcore.lib
│   │   ├── [989K] save-generator.exe
│   │   ├── [381K] sign.exe
│   │   └── [408K] strkey.exe
│   ├── [4.0K] include
│   │   └── [4.0K] tox
│   │   ├── [177K] tox.h
│   │   ├── [ 10K] tox_dispatch.h
│   │   ├── [ 26K] tox_events.h
│   │   ├── [6.4K] tox_private.h
│   │   ├── [ 26K] toxav.h
│   │   └── [ 12K] toxencryptsave.h
│   └── [4.0K] lib
│   ├── [577K] libopus.a
│   ├── [660K] libsodium.a
│   ├── [ 10K] libssp.a
│   ├── [1016K] libtoxcore.a
│   ├── [456K] libtoxcore.dll.a
│   ├── [2.7M] libvpx.a
│   ├── [ 72K] libwinpthread.a
│   └── [4.0K] pkgconfig
│   ├── [ 250] libsodium.pc
│   ├── [ 357] opus.pc
│   ├── [ 247] toxcore.pc
│   └── [ 309] vpx.pc
└── [4.0K] x86_64
├── [ 36K] bin
│   ├── [504K] DHT_bootstrap.exe
│   ├── [474K] cracker.exe
│   ├── [277K] cracker_simple.exe
│   ├── [287K] create_bootstrap_keys.exe
│   ├── [288K] create_minimal_savedata.exe
│   ├── [769K] create_savedata.exe
│   ├── [ 18K] libtoxcore.def
│   ├── [2.4M] libtoxcore.dll
│   ├── [ 64K] libtoxcore.exp
│   ├── [420K] libtoxcore.lib
│   ├── [800K] save-generator.exe
│   ├── [289K] sign.exe
│   └── [317K] strkey.exe
├── [4.0K] include
│   └── [4.0K] tox
│   ├── [177K] tox.h
│   ├── [ 10K] tox_dispatch.h
│   ├── [ 26K] tox_events.h
│   ├── [6.4K] tox_private.h
│   ├── [ 26K] toxav.h
│   └── [ 12K] toxencryptsave.h
└── [4.0K] lib
├── [697K] libopus.a
├── [575K] libsodium.a
├── [ 11K] libssp.a
├── [905K] libtoxcore.a
├── [449K] libtoxcore.dll.a
├── [2.9M] libvpx.a
├── [ 68K] libwinpthread.a
└── [4.0K] pkgconfig
├── [ 252] libsodium.pc
├── [ 359] opus.pc
├── [ 249] toxcore.pc
└── [ 311] vpx.pc
12 directories, 60 files
```
- `libtoxcore.dll` is the shared library. It is fully self-contained, with no
additional dependencies aside from the Windows OS dlls, and can be used in
MSVC, MinGW, Clang, etc. projects. Despite its name, it provides toxcore,
toxav, toxencryptsave -- all of Tox.
- `libtoxcore.a` is the static library. In order to use it, it needs to be
linked against the other provided .a libraries (but not the .dll.a!) and
additionally against the `iphlpapi` and `ws2_32` Windows libraries (see the
example link line after this list). It similarly provides all of the Tox APIs.
- `libtoxcore.dll.a` is the MinGW import library for `libtoxcore.dll`.
- `libtoxcore.lib` is the MSVC import library for `libtoxcore.dll`.
- `libtoxcore.exp` and `libtoxcore.def` list the symbols exported by
`libtoxcore`.
- `*.exe` are statically compiled executables -- `DHT_bootstrap` and
[the fun utils](#secondary).
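As an illustration, linking a program against the static x86_64 build with MinGW might look like the following. This is only a sketch: `mybot.c`, the output prefix path, and the exact cross-compiler name are placeholders that depend on your setup.
```sh
x86_64-w64-mingw32-gcc mybot.c -o mybot.exe \
  -I/path/to/where/output/build/result/x86_64/include \
  -L/path/to/where/output/build/result/x86_64/lib \
  -ltoxcore -lopus -lvpx -lsodium -lwinpthread \
  -liphlpapi -lws2_32 -static
```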
## Pre-built binaries
### Linux
Toxcore is packaged by at least the following distributions: ALT Linux,
[Arch Linux](https://www.archlinux.org/packages/?q=toxcore),
[Fedora](https://apps.fedoraproject.org/packages/toxcore), Mageia, openSUSE,
PCLinuxOS, ROSA and Slackware,
[according to the information from pkgs.org](https://pkgs.org/download/toxcore).
Note that this list might be incomplete and some other distributions might
package it too.

README.md

@ -1,205 +1,4 @@
# ![Project Tox](https://raw.github.com/TokTok/c-toxcore/master/other/tox.png "Project Tox")
# Tomato
**Current Coverage:**
[![coverage](https://codecov.io/gh/TokTok/c-toxcore/branch/master/graph/badge.svg?token=BRfCKo02De)](https://codecov.io/gh/TokTok/c-toxcore)
![tomato](res/icon/tomato_v1_256.png)
[**Website**](https://tox.chat) **|** [**Wiki**](https://wiki.tox.chat/) **|**
[**Blog**](https://blog.tox.chat/) **|**
[**FAQ**](https://wiki.tox.chat/doku.php?id=users:faq) **|**
[**Binaries/Downloads**](https://tox.chat/download.html) **|**
[**Clients**](https://wiki.tox.chat/doku.php?id=clients) **|**
[**Compiling**](/INSTALL.md)
## What is Tox
Tox is a peer-to-peer (serverless) instant messenger aimed at making security
and privacy easy to obtain for regular users. It uses
[libsodium](https://doc.libsodium.org/) (based on
[NaCl](https://nacl.cr.yp.to/)) for its encryption and authentication.
## IMPORTANT!
### ![Danger: Experimental](other/tox-warning.png)
This is an **experimental** cryptographic network library. It has not been
formally audited by an independent third party that specializes in cryptography
or cryptanalysis. **Use this library at your own risk.**
The underlying crypto library [libsodium](https://doc.libsodium.org/) provides
reliable encryption, but the security model has not yet been fully specified.
See [issue 210](https://github.com/TokTok/c-toxcore/issues/210) for a discussion
on developing a threat model. See other issues for known weaknesses (e.g.
[issue 426](https://github.com/TokTok/c-toxcore/issues/426) describes what can
happen if your secret key is stolen).
## Toxcore Development Roadmap
The roadmap and changelog are generated from GitHub issues. You may view them on
the website, where they are updated at least once every 24 hours:
- Changelog: https://toktok.ltd/changelog/c-toxcore
- Roadmap: https://toktok.ltd/roadmap/c-toxcore
## Installing toxcore
Detailed installation instructions can be found in [INSTALL.md](INSTALL.md).
Be advised that due to the addition of `cmp` as a submodule, you now also need
to initialize the git submodules required by toxcore. This can be done by
cloning the repo with the following command:
`git clone --recurse-submodules https://github.com/Toktok/c-toxcore` or by
running `git submodule update --init` in the root directory of the repo.
In a nutshell, if you have [libsodium](https://github.com/jedisct1/libsodium)
installed, run:
```sh
mkdir _build && cd _build
cmake ..
make
sudo make install
```
If you have [libvpx](https://github.com/webmproject/libvpx) and
[opus](https://github.com/xiph/opus) installed, the above will also build the
A/V library for multimedia chats.
## Using toxcore
The simplest "hello world" example could be an echo bot. Here we will walk
through the implementation of a simple bot.
### Creating the tox instance
All toxcore API functions work with error parameters. They are enums with one
`OK` value and several error codes that describe the different situations in
which the function might fail.
```c
TOX_ERR_NEW err_new;
Tox *tox = tox_new(NULL, &err_new);
if (err_new != TOX_ERR_NEW_OK) {
fprintf(stderr, "tox_new failed with error code %d\n", err_new);
exit(1);
}
```
Here, we simply exit the program, but in a real client you will probably want to
do some error handling and proper error reporting to the user. The `NULL`
argument given to the first parameter of `tox_new` is the `Tox_Options`. It
contains various write-once network settings and allows you to load a previously
serialised instance. See [toxcore/tox.h](toxcore/tox.h) for details.
### Setting up callbacks
Toxcore works with callbacks that you can register to listen for certain events.
Examples of such events are "friend request received" or "friend sent a
message". Search the API for `tox_callback_*` to find all of them.
Here, we will set up callbacks for receiving friend requests and receiving
messages. We will always accept any friend request (because we're a bot), and
when we receive a message, we send it back to the sender.
```c
tox_callback_friend_request(tox, handle_friend_request);
tox_callback_friend_message(tox, handle_friend_message);
```
These two function calls set up the callbacks. Now we also need to implement
these "handle" functions.
### Handle friend requests
```c
static void handle_friend_request(
Tox *tox, const uint8_t *public_key, const uint8_t *message, size_t length,
void *user_data) {
// Accept the friend request:
TOX_ERR_FRIEND_ADD err_friend_add;
tox_friend_add_norequest(tox, public_key, &err_friend_add);
if (err_friend_add != TOX_ERR_FRIEND_ADD_OK) {
fprintf(stderr, "unable to add friend: %d\n", err_friend_add);
}
}
```
The `tox_friend_add_norequest` function adds the friend without sending them a
friend request. Since we already got a friend request, this is the right thing
to do. If you wanted to send a friend request yourself, you would use
`tox_friend_add`, which has an extra parameter for the message.
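For illustration, sending a friend request yourself might look like this (a sketch only; `friend_address` stands for the 38-byte binary Tox address of the peer you want to add, and the greeting text is arbitrary):
```c
TOX_ERR_FRIEND_ADD err_friend_add;
tox_friend_add(tox, friend_address,
               (const uint8_t *)"Hello, I am an echo bot", 23,
               &err_friend_add);
if (err_friend_add != TOX_ERR_FRIEND_ADD_OK) {
    fprintf(stderr, "unable to add friend: %d\n", err_friend_add);
}
```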
### Handle messages
Now, when the friend sends us a message, we want to respond to them by sending
them the same message back. This will be our "echo".
```c
static void handle_friend_message(
Tox *tox, uint32_t friend_number, TOX_MESSAGE_TYPE type,
const uint8_t *message, size_t length,
void *user_data) {
TOX_ERR_FRIEND_SEND_MESSAGE err_send;
tox_friend_send_message(tox, friend_number, type, message, length,
&err_send);
if (err_send != TOX_ERR_FRIEND_SEND_MESSAGE_OK) {
fprintf(stderr, "unable to send message back to friend %d: %d\n",
friend_number, err_send);
}
}
```
That's it for the setup. Now we want to actually run the bot.
### Main event loop
Toxcore works with a main event loop function `tox_iterate` that you need to
call at a certain frequency dictated by `tox_iteration_interval`. This is a
polling function that receives new network messages and processes them.
```c
while (true) {
usleep(1000 * tox_iteration_interval(tox));
tox_iterate(tox, NULL);
}
```
That's it! Now you have a working echo bot. The only problem is that since Tox
works with public keys, and you can't really guess your bot's public key, you
can't add it as a friend in your client. For this, we need to call another API
function: `tox_self_get_address(tox, address)`. This will fill the 38-byte
friend address into the `address` buffer. You can then display that binary
string as hex and input it into your client. Writing a proper `bin2hex` function
is left as an exercise for the reader.
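For instance, a minimal way to print the address as hex (not a polished `bin2hex`, just a sketch):
```c
uint8_t address[TOX_ADDRESS_SIZE];
tox_self_get_address(tox, address);
for (size_t i = 0; i < TOX_ADDRESS_SIZE; ++i) {
    printf("%02X", address[i]);
}
printf("\n");
```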
We glossed over a lot of details, such as the user data which we passed to
`tox_iterate` (passing `NULL`), bootstrapping into an actual network (this bot
will work in the LAN, but not on an internet server) and the fact that we now
have no clean way of stopping the bot (`while (true)`). If you want to write a
real bot, you will probably want to read up on all the API functions. Consult
the API documentation in [toxcore/tox.h](toxcore/tox.h) for more information.
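For instance, bootstrapping into the public network might look like this (a sketch; the host name, port, and key are placeholders, and you would first need to convert the node's hex public key into the 32 raw bytes expected by the API):
```c
uint8_t dht_key[TOX_PUBLIC_KEY_SIZE] = {0};  // fill with the node's raw public key
TOX_ERR_BOOTSTRAP err_bootstrap;
tox_bootstrap(tox, "node.example.com", 33445, dht_key, &err_bootstrap);
if (err_bootstrap != TOX_ERR_BOOTSTRAP_OK) {
    fprintf(stderr, "bootstrap failed: %d\n", err_bootstrap);
}
```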
### Other resources
- [Another echo bot](https://wiki.tox.chat/developers/client_examples/echo_bot)
- [minitox](https://github.com/hqwrong/minitox) (A minimal tox client)
## SAST Tools
This project uses various tools supporting Static Application Security Testing:
- [clang-tidy](https://clang.llvm.org/extra/clang-tidy/): A clang-based C++
"linter" tool.
- [Coverity](https://scan.coverity.com/): A cloud-based static analyzer service
for Java, C/C++, C#, JavaScript, Ruby, or Python that is free for open source
projects.
- [cppcheck](https://cppcheck.sourceforge.io/): A static analyzer for C/C++
code.
- [cpplint](https://github.com/cpplint/cpplint): Static code checker for C++.
- [goblint](https://goblint.in.tum.de/): A static analyzer for multi-threaded C
programs, specializing in finding concurrency bugs.
- [infer](https://github.com/facebook/infer): A static analyzer for Java, C,
C++, and Objective-C.
- [PVS-Studio](https://pvs-studio.com/en/pvs-studio/?utm_source=website&utm_medium=github&utm_campaign=open_source):
A static analyzer for C, C++, C#, and Java code.
- [tokstyle](https://github.com/TokTok/hs-tokstyle): A style checker for TokTok
C projects.


@ -1,119 +0,0 @@
set(TEST_TIMEOUT_SECONDS "" CACHE STRING "Limit runtime of each test to the number of seconds specified")
add_library(auto_test_support
auto_test_support.c
auto_test_support.h)
target_link_libraries(auto_test_support PRIVATE misc_tools)
if(TARGET toxcore_static)
target_link_libraries(auto_test_support PRIVATE toxcore_static)
else()
target_link_libraries(auto_test_support PRIVATE toxcore_shared)
endif()
if(TARGET PThreads4W::PThreads4W)
target_link_libraries(auto_test_support PRIVATE PThreads4W::PThreads4W)
elseif(TARGET Threads::Threads)
target_link_libraries(auto_test_support PRIVATE Threads::Threads)
endif()
function(auto_test target)
add_executable(auto_${target}_test ${target}_test.c)
target_link_libraries(auto_${target}_test PRIVATE misc_tools auto_test_support)
if(TARGET toxcore_static)
target_link_libraries(auto_${target}_test PRIVATE toxcore_static)
else()
target_link_libraries(auto_${target}_test PRIVATE toxcore_shared)
endif()
if(TARGET PThreads4W::PThreads4W)
target_link_libraries(auto_${target}_test PRIVATE PThreads4W::PThreads4W)
elseif(TARGET Threads::Threads)
target_link_libraries(auto_${target}_test PRIVATE Threads::Threads)
endif()
add_test(NAME ${target} COMMAND ${CROSSCOMPILING_EMULATOR} auto_${target}_test)
set_tests_properties(${target} PROPERTIES TIMEOUT "${TEST_TIMEOUT_SECONDS}")
# add the source dir as environment variable, so the testdata can be found
set_tests_properties(${target} PROPERTIES ENVIRONMENT "LLVM_PROFILE_FILE=${target}.profraw;srcdir=${CMAKE_CURRENT_SOURCE_DIR}")
endfunction()
auto_test(TCP)
auto_test(announce)
auto_test(conference)
auto_test(conference_double_invite)
auto_test(conference_invite_merge)
auto_test(conference_peer_nick)
auto_test(conference_simple)
auto_test(conference_two)
auto_test(crypto)
#auto_test(dht) # Doesn't work with UNITY_BUILD.
auto_test(dht_getnodes_api)
auto_test(encryptsave)
auto_test(file_saving)
auto_test(file_streaming)
auto_test(file_transfer)
auto_test(forwarding)
auto_test(friend_connection)
auto_test(friend_request)
auto_test(friend_request_spam)
auto_test(group_general)
auto_test(group_invite)
auto_test(group_message)
auto_test(group_moderation)
auto_test(group_save)
auto_test(group_state)
auto_test(group_sync)
auto_test(group_tcp)
auto_test(group_topic)
auto_test(invalid_tcp_proxy)
auto_test(invalid_udp_proxy)
auto_test(lan_discovery)
auto_test(lossless_packet)
auto_test(lossy_packet)
auto_test(network)
auto_test(onion)
auto_test(overflow_recvq)
auto_test(overflow_sendq)
auto_test(reconnect)
auto_test(save_friend)
auto_test(save_load)
auto_test(send_message)
auto_test(set_name)
auto_test(set_status_message)
auto_test(tox_dispatch)
auto_test(tox_events)
auto_test(tox_many)
auto_test(tox_many_tcp)
auto_test(tox_strncasecmp)
auto_test(typing)
auto_test(version)
auto_test(save_compatibility)
target_include_directories(auto_encryptsave_test SYSTEM PRIVATE ${LIBSODIUM_INCLUDE_DIRS})
if(NON_HERMETIC_TESTS)
auto_test(bootstrap)
auto_test(tcp_relay)
endif()
if(BUILD_TOXAV)
auto_test(conference_av)
auto_test(toxav_basic)
auto_test(toxav_many)
if(MSVC)
target_link_libraries(auto_toxav_basic_test PRIVATE PkgConfig::VPX)
target_link_libraries(auto_toxav_many_test PRIVATE PkgConfig::VPX)
else()
target_link_libraries(auto_toxav_basic_test PRIVATE ${VPX_LIBRARIES})
target_link_directories(auto_toxav_basic_test PRIVATE ${VPX_LIBRARY_DIRS})
target_include_directories(auto_toxav_basic_test SYSTEM PRIVATE ${VPX_INCLUDE_DIRS})
target_compile_options(auto_toxav_basic_test PRIVATE ${VPX_CFLAGS_OTHER})
target_link_libraries(auto_toxav_many_test PRIVATE ${VPX_LIBRARIES})
target_link_directories(auto_toxav_many_test PRIVATE ${VPX_LIBRARY_DIRS})
target_include_directories(auto_toxav_many_test SYSTEM PRIVATE ${VPX_INCLUDE_DIRS})
target_compile_options(auto_toxav_many_test PRIVATE ${VPX_CFLAGS_OTHER})
endif()
endif()
if(PROXY_TEST)
auto_test(proxy)
endif()


@ -1,268 +0,0 @@
#include <stdio.h>
#include <stdlib.h>
#include "../testing/misc_tools.h"
#include "../toxcore/tox.h"
#include "../toxcore/tox_dispatch.h"
#include "../toxcore/tox_events.h"
#include "auto_test_support.h"
#include "check_compat.h"
typedef struct State {
uint32_t id;
Tox *tox;
bool self_online;
bool friend_online;
bool invited_next;
bool joined;
uint32_t conference;
bool received;
uint32_t peers;
} State;
static void handle_self_connection_status(const Tox_Event_Self_Connection_Status *event, void *user_data)
{
State *state = (State *)user_data;
const Tox_Connection connection_status = tox_event_self_connection_status_get_connection_status(event);
fprintf(stderr, "self_connection_status(#%u, %d, _)\n", state->id, connection_status);
state->self_online = connection_status != TOX_CONNECTION_NONE;
}
static void handle_friend_connection_status(const Tox_Event_Friend_Connection_Status *event,
void *user_data)
{
State *state = (State *)user_data;
const uint32_t friend_number = tox_event_friend_connection_status_get_friend_number(event);
const Tox_Connection connection_status = tox_event_friend_connection_status_get_connection_status(event);
fprintf(stderr, "handle_friend_connection_status(#%u, %u, %d, _)\n", state->id, friend_number, connection_status);
state->friend_online = connection_status != TOX_CONNECTION_NONE;
}
static void handle_conference_invite(const Tox_Event_Conference_Invite *event, void *user_data)
{
State *state = (State *)user_data;
const uint32_t friend_number = tox_event_conference_invite_get_friend_number(event);
const Tox_Conference_Type type = tox_event_conference_invite_get_type(event);
const uint8_t *cookie = tox_event_conference_invite_get_cookie(event);
const size_t length = tox_event_conference_invite_get_cookie_length(event);
fprintf(stderr, "handle_conference_invite(#%u, %u, %d, uint8_t[%u], _)\n",
state->id, friend_number, type, (unsigned)length);
fprintf(stderr, "tox%u joining conference\n", state->id);
{
Tox_Err_Conference_Join err;
state->conference = tox_conference_join(state->tox, friend_number, cookie, length, &err);
ck_assert_msg(err == TOX_ERR_CONFERENCE_JOIN_OK, "failed to join a conference: err = %d", err);
fprintf(stderr, "tox%u Joined conference %u\n", state->id, state->conference);
state->joined = true;
}
}
static void handle_conference_message(const Tox_Event_Conference_Message *event, void *user_data)
{
State *state = (State *)user_data;
const uint32_t conference_number = tox_event_conference_message_get_conference_number(event);
const uint32_t peer_number = tox_event_conference_message_get_peer_number(event);
const Tox_Message_Type type = tox_event_conference_message_get_type(event);
const uint8_t *message = tox_event_conference_message_get_message(event);
const size_t length = tox_event_conference_message_get_message_length(event);
fprintf(stderr, "handle_conference_message(#%u, %u, %u, %d, uint8_t[%u], _)\n",
state->id, conference_number, peer_number, type, (unsigned)length);
fprintf(stderr, "tox%u got message: %s\n", state->id, (const char *)message);
state->received = true;
}
static void handle_conference_peer_list_changed(const Tox_Event_Conference_Peer_List_Changed *event, void *user_data)
{
State *state = (State *)user_data;
const uint32_t conference_number = tox_event_conference_peer_list_changed_get_conference_number(event);
fprintf(stderr, "handle_conference_peer_list_changed(#%u, %u, _)\n",
state->id, conference_number);
Tox_Err_Conference_Peer_Query err;
uint32_t count = tox_conference_peer_count(state->tox, conference_number, &err);
if (err != TOX_ERR_CONFERENCE_PEER_QUERY_OK) {
fprintf(stderr, "ERROR: %d\n", err);
exit(EXIT_FAILURE);
}
fprintf(stderr, "tox%u has %u peers online\n", state->id, count);
state->peers = count;
}
static void handle_conference_connected(const Tox_Event_Conference_Connected *event, void *user_data)
{
State *state = (State *)user_data;
// We're tox2, so now we invite tox3.
if (state->id == 2 && !state->invited_next) {
Tox_Err_Conference_Invite err;
tox_conference_invite(state->tox, 1, state->conference, &err);
ck_assert_msg(err == TOX_ERR_CONFERENCE_INVITE_OK, "tox2 failed to invite tox3: err = %d", err);
state->invited_next = true;
fprintf(stderr, "tox2 invited tox3\n");
}
}
static void iterate_one(
Tox *tox, State *state, const Tox_Dispatch *dispatch)
{
Tox_Err_Events_Iterate err;
Tox_Events *events = tox_events_iterate(tox, true, &err);
ck_assert(err == TOX_ERR_EVENTS_ITERATE_OK);
tox_dispatch_invoke(dispatch, events, state);
tox_events_free(events);
}
static void iterate3_wait(
State *state1, State *state2, State *state3,
const Tox_Dispatch *dispatch, int interval)
{
iterate_one(state1->tox, state1, dispatch);
iterate_one(state2->tox, state2, dispatch);
iterate_one(state3->tox, state3, dispatch);
c_sleep(interval);
}
int main(void)
{
setvbuf(stdout, nullptr, _IONBF, 0);
State state1 = {1};
State state2 = {2};
State state3 = {3};
// Create toxes.
state1.tox = tox_new_log(nullptr, nullptr, &state1.id);
state2.tox = tox_new_log(nullptr, nullptr, &state2.id);
state3.tox = tox_new_log(nullptr, nullptr, &state3.id);
tox_events_init(state1.tox);
tox_events_init(state2.tox);
tox_events_init(state3.tox);
// tox1 <-> tox2, tox2 <-> tox3
uint8_t key[TOX_PUBLIC_KEY_SIZE];
tox_self_get_public_key(state2.tox, key);
tox_friend_add_norequest(state1.tox, key, nullptr); // tox1 -> tox2
tox_self_get_public_key(state1.tox, key);
tox_friend_add_norequest(state2.tox, key, nullptr); // tox2 -> tox1
tox_self_get_public_key(state3.tox, key);
tox_friend_add_norequest(state2.tox, key, nullptr); // tox2 -> tox3
tox_self_get_public_key(state2.tox, key);
tox_friend_add_norequest(state3.tox, key, nullptr); // tox3 -> tox2
printf("bootstrapping tox2 and tox3 off tox1\n");
uint8_t dht_key[TOX_PUBLIC_KEY_SIZE];
tox_self_get_dht_id(state1.tox, dht_key);
const uint16_t dht_port = tox_self_get_udp_port(state1.tox, nullptr);
tox_bootstrap(state2.tox, "localhost", dht_port, dht_key, nullptr);
tox_bootstrap(state3.tox, "localhost", dht_port, dht_key, nullptr);
Tox_Dispatch *dispatch = tox_dispatch_new(nullptr);
ck_assert(dispatch != nullptr);
// Connection callbacks.
tox_events_callback_self_connection_status(dispatch, handle_self_connection_status);
tox_events_callback_friend_connection_status(dispatch, handle_friend_connection_status);
// Conference callbacks.
tox_events_callback_conference_invite(dispatch, handle_conference_invite);
tox_events_callback_conference_connected(dispatch, handle_conference_connected);
tox_events_callback_conference_message(dispatch, handle_conference_message);
tox_events_callback_conference_peer_list_changed(dispatch, handle_conference_peer_list_changed);
// Wait for self connection.
fprintf(stderr, "Waiting for toxes to come online\n");
do {
iterate3_wait(&state1, &state2, &state3, dispatch, 100);
} while (!state1.self_online || !state2.self_online || !state3.self_online);
fprintf(stderr, "Toxes are online\n");
// Wait for friend connection.
fprintf(stderr, "Waiting for friends to connect\n");
do {
iterate3_wait(&state1, &state2, &state3, dispatch, 100);
} while (!state1.friend_online || !state2.friend_online || !state3.friend_online);
fprintf(stderr, "Friends are connected\n");
{
// Create new conference, tox1 is the founder.
Tox_Err_Conference_New err;
state1.conference = tox_conference_new(state1.tox, &err);
state1.joined = true;
ck_assert_msg(err == TOX_ERR_CONFERENCE_NEW_OK, "failed to create a conference: err = %d", err);
fprintf(stderr, "Created conference: id = %u\n", state1.conference);
}
{
// Invite friend.
Tox_Err_Conference_Invite err;
tox_conference_invite(state1.tox, 0, state1.conference, &err);
ck_assert_msg(err == TOX_ERR_CONFERENCE_INVITE_OK, "failed to invite a friend: err = %d", err);
state1.invited_next = true;
fprintf(stderr, "tox1 invited tox2\n");
}
fprintf(stderr, "Waiting for invitation to arrive\n");
do {
iterate3_wait(&state1, &state2, &state3, dispatch, 100);
} while (!state1.joined || !state2.joined || !state3.joined);
fprintf(stderr, "Invitations accepted\n");
fprintf(stderr, "Waiting for peers to come online\n");
do {
iterate3_wait(&state1, &state2, &state3, dispatch, 100);
} while (state1.peers == 0 || state2.peers == 0 || state3.peers == 0);
fprintf(stderr, "All peers are online\n");
{
fprintf(stderr, "tox1 sends a message to the group: \"hello!\"\n");
Tox_Err_Conference_Send_Message err;
tox_conference_send_message(state1.tox, state1.conference, TOX_MESSAGE_TYPE_NORMAL,
(const uint8_t *)"hello!", 7, &err);
if (err != TOX_ERR_CONFERENCE_SEND_MESSAGE_OK) {
fprintf(stderr, "ERROR: %d\n", err);
exit(EXIT_FAILURE);
}
}
fprintf(stderr, "Waiting for messages to arrive\n");
do {
iterate3_wait(&state1, &state2, &state3, dispatch, 100);
c_sleep(100);
} while (!state2.received || !state3.received);
fprintf(stderr, "Messages received. Test complete.\n");
tox_dispatch_free(dispatch);
tox_kill(state3.tox);
tox_kill(state2.tox);
tox_kill(state1.tox);
return 0;
}

Binary file not shown.


@ -1,272 +0,0 @@
/* File transfer test: streaming version (no known size).
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include "../testing/misc_tools.h"
#include "../toxcore/ccompat.h"
#include "../toxcore/tox.h"
#include "../toxcore/util.h"
#include "auto_test_support.h"
#include "check_compat.h"
#ifndef USE_IPV6
#define USE_IPV6 1
#endif
#ifdef TOX_LOCALHOST
#undef TOX_LOCALHOST
#endif
#if USE_IPV6
#define TOX_LOCALHOST "::1"
#else
#define TOX_LOCALHOST "127.0.0.1"
#endif
static void accept_friend_request(Tox *m, const uint8_t *public_key, const uint8_t *data, size_t length, void *userdata)
{
if (length == 7 && memcmp("Gentoo", data, 7) == 0) {
tox_friend_add_norequest(m, public_key, nullptr);
}
}
static uint64_t size_recv;
static uint64_t sending_pos;
static uint8_t file_cmp_id[TOX_FILE_ID_LENGTH];
static uint32_t file_accepted;
static uint64_t file_size;
static void tox_file_receive(Tox *tox, uint32_t friend_number, uint32_t file_number, uint32_t kind, uint64_t filesize,
const uint8_t *filename, size_t filename_length, void *userdata)
{
ck_assert_msg(kind == TOX_FILE_KIND_DATA, "bad kind");
ck_assert_msg(filename_length == sizeof("Gentoo.exe")
&& memcmp(filename, "Gentoo.exe", sizeof("Gentoo.exe")) == 0, "bad filename");
uint8_t file_id[TOX_FILE_ID_LENGTH];
ck_assert_msg(tox_file_get_file_id(tox, friend_number, file_number, file_id, nullptr), "tox_file_get_file_id error");
ck_assert_msg(memcmp(file_id, file_cmp_id, TOX_FILE_ID_LENGTH) == 0, "bad file_id");
const uint8_t empty[TOX_FILE_ID_LENGTH] = {0};
ck_assert_msg(memcmp(empty, file_cmp_id, TOX_FILE_ID_LENGTH) != 0, "empty file_id");
file_size = filesize;
if (filesize) {
sending_pos = size_recv = 1337;
Tox_Err_File_Seek err_s;
ck_assert_msg(tox_file_seek(tox, friend_number, file_number, 1337, &err_s), "tox_file_seek error");
ck_assert_msg(err_s == TOX_ERR_FILE_SEEK_OK, "tox_file_seek wrong error");
} else {
sending_pos = size_recv = 0;
}
Tox_Err_File_Control error;
ck_assert_msg(tox_file_control(tox, friend_number, file_number, TOX_FILE_CONTROL_RESUME, &error),
"tox_file_control failed. %i", error);
++file_accepted;
Tox_Err_File_Seek err_s;
ck_assert_msg(!tox_file_seek(tox, friend_number, file_number, 1234, &err_s), "tox_file_seek no error");
ck_assert_msg(err_s == TOX_ERR_FILE_SEEK_DENIED, "tox_file_seek wrong error");
}
static uint32_t sendf_ok;
static void file_print_control(Tox *tox, uint32_t friend_number, uint32_t file_number, Tox_File_Control control,
void *userdata)
{
/* First send file num is 0.*/
if (file_number == 0 && control == TOX_FILE_CONTROL_RESUME) {
sendf_ok = 1;
}
}
static uint64_t max_sending;
static bool m_send_reached;
static uint8_t sending_num;
static bool file_sending_done;
static void tox_file_chunk_request(Tox *tox, uint32_t friend_number, uint32_t file_number, uint64_t position,
size_t length, void *user_data)
{
ck_assert_msg(sendf_ok, "didn't get resume control");
ck_assert_msg(sending_pos == position, "bad position %lu", (unsigned long)position);
if (length == 0) {
ck_assert_msg(!file_sending_done, "file sending already done");
file_sending_done = 1;
return;
}
if (position + length > max_sending) {
ck_assert_msg(!m_send_reached, "requested done file transfer");
length = max_sending - position;
m_send_reached = 1;
}
VLA(uint8_t, f_data, length);
memset(f_data, sending_num, length);
Tox_Err_File_Send_Chunk error;
tox_file_send_chunk(tox, friend_number, file_number, position, f_data, length, &error);
ck_assert_msg(error == TOX_ERR_FILE_SEND_CHUNK_OK,
"could not send chunk, error num=%d pos=%d len=%d", (int)error, (int)position, (int)length);
++sending_num;
sending_pos += length;
}
static uint8_t num;
static bool file_recv;
static void write_file(Tox *tox, uint32_t friendnumber, uint32_t filenumber, uint64_t position, const uint8_t *data,
size_t length, void *user_data)
{
ck_assert_msg(size_recv == position, "bad position");
if (length == 0) {
file_recv = 1;
return;
}
VLA(uint8_t, f_data, length);
memset(f_data, num, length);
++num;
ck_assert_msg(memcmp(f_data, data, length) == 0, "FILE_CORRUPTED");
size_recv += length;
}
static void file_transfer_test(void)
{
printf("Starting test: few_clients\n");
uint32_t index[] = { 1, 2, 3 };
long long unsigned int cur_time = time(nullptr);
Tox_Err_New t_n_error;
Tox *tox1 = tox_new_log(nullptr, &t_n_error, &index[0]);
ck_assert_msg(t_n_error == TOX_ERR_NEW_OK, "wrong error");
Tox *tox2 = tox_new_log(nullptr, &t_n_error, &index[1]);
ck_assert_msg(t_n_error == TOX_ERR_NEW_OK, "wrong error");
Tox *tox3 = tox_new_log(nullptr, &t_n_error, &index[2]);
ck_assert_msg(t_n_error == TOX_ERR_NEW_OK, "wrong error");
ck_assert_msg(tox1 && tox2 && tox3, "Failed to create 3 tox instances");
tox_callback_friend_request(tox2, accept_friend_request);
uint8_t address[TOX_ADDRESS_SIZE];
tox_self_get_address(tox2, address);
uint32_t test = tox_friend_add(tox3, address, (const uint8_t *)"Gentoo", 7, nullptr);
ck_assert_msg(test == 0, "Failed to add friend error code: %u", test);
uint8_t dht_key[TOX_PUBLIC_KEY_SIZE];
tox_self_get_dht_id(tox1, dht_key);
uint16_t dht_port = tox_self_get_udp_port(tox1, nullptr);
tox_bootstrap(tox2, TOX_LOCALHOST, dht_port, dht_key, nullptr);
tox_bootstrap(tox3, TOX_LOCALHOST, dht_port, dht_key, nullptr);
printf("Waiting for toxes to come online\n");
do {
tox_iterate(tox1, nullptr);
tox_iterate(tox2, nullptr);
tox_iterate(tox3, nullptr);
printf("Connections: self (%d, %d, %d), friends (%d, %d)\n",
tox_self_get_connection_status(tox1),
tox_self_get_connection_status(tox2),
tox_self_get_connection_status(tox3),
tox_friend_get_connection_status(tox2, 0, nullptr),
tox_friend_get_connection_status(tox3, 0, nullptr));
c_sleep(ITERATION_INTERVAL);
} while (tox_self_get_connection_status(tox1) == TOX_CONNECTION_NONE ||
tox_self_get_connection_status(tox2) == TOX_CONNECTION_NONE ||
tox_self_get_connection_status(tox3) == TOX_CONNECTION_NONE ||
tox_friend_get_connection_status(tox2, 0, nullptr) == TOX_CONNECTION_NONE ||
tox_friend_get_connection_status(tox3, 0, nullptr) == TOX_CONNECTION_NONE);
printf("Starting file transfer test: 100MiB file.\n");
file_accepted = file_size = sendf_ok = size_recv = 0;
file_recv = 0;
max_sending = UINT64_MAX;
printf("Starting file streaming transfer test.\n");
file_sending_done = 0;
file_accepted = 0;
file_size = 0;
sendf_ok = 0;
size_recv = 0;
file_recv = 0;
tox_callback_file_recv_chunk(tox3, write_file);
tox_callback_file_recv_control(tox2, file_print_control);
tox_callback_file_chunk_request(tox2, tox_file_chunk_request);
tox_callback_file_recv_control(tox3, file_print_control);
tox_callback_file_recv(tox3, tox_file_receive);
const uint64_t totalf_size = UINT64_MAX;
Tox_File_Number fnum = tox_file_send(
tox2, 0, TOX_FILE_KIND_DATA, totalf_size, nullptr,
(const uint8_t *)"Gentoo.exe", sizeof("Gentoo.exe"), nullptr);
ck_assert_msg(fnum != UINT32_MAX, "tox_new_file_sender fail");
Tox_Err_File_Get gfierr;
ck_assert_msg(!tox_file_get_file_id(tox2, 1, fnum, file_cmp_id, &gfierr), "tox_file_get_file_id didn't fail");
ck_assert_msg(gfierr == TOX_ERR_FILE_GET_FRIEND_NOT_FOUND, "wrong error");
ck_assert_msg(!tox_file_get_file_id(tox2, 0, fnum + 1, file_cmp_id, &gfierr), "tox_file_get_file_id didn't fail");
ck_assert_msg(gfierr == TOX_ERR_FILE_GET_NOT_FOUND, "wrong error");
ck_assert_msg(tox_file_get_file_id(tox2, 0, fnum, file_cmp_id, &gfierr), "tox_file_get_file_id failed");
ck_assert_msg(gfierr == TOX_ERR_FILE_GET_OK, "wrong error");
max_sending = 100 * 1024;
m_send_reached = 0;
do {
tox_iterate(tox1, nullptr);
tox_iterate(tox2, nullptr);
tox_iterate(tox3, nullptr);
uint32_t tox1_interval = tox_iteration_interval(tox1);
uint32_t tox2_interval = tox_iteration_interval(tox2);
uint32_t tox3_interval = tox_iteration_interval(tox3);
c_sleep(min_u32(tox1_interval, min_u32(tox2_interval, tox3_interval)));
} while (!file_sending_done);
ck_assert_msg(sendf_ok && file_recv && m_send_reached && totalf_size == file_size && size_recv == max_sending
&& sending_pos == size_recv && file_accepted == 1,
"something went wrong in file transfer %u %u %u %u %u %u %u %lu %lu %lu %lu", sendf_ok, file_recv,
m_send_reached, totalf_size == file_size, size_recv == max_sending, sending_pos == size_recv, file_accepted == 1,
(unsigned long)totalf_size, (unsigned long)file_size,
(unsigned long)size_recv, (unsigned long)sending_pos);
printf("file_transfer_test succeeded, took %llu seconds\n", time(nullptr) - cur_time);
tox_kill(tox1);
tox_kill(tox2);
tox_kill(tox3);
}
int main(void)
{
setvbuf(stdout, nullptr, _IONBF, 0);
file_transfer_test();
return 0;
}


@ -1,9 +0,0 @@
#include "../toxcore/tox.h"
#include "../toxcore/ccompat.h"
int main(void)
{
tox_kill(tox_new(nullptr, nullptr));
return 0;
}


@ -1,15 +0,0 @@
pool:
vmImage: "windows-2019"
jobs:
- job: "windows_msvc_conan"
strategy:
matrix:
static:
conan.shared: "False"
shared:
conan.shared: "True"
steps:
- bash: python -m pip install conan==1.59.0
- bash: git submodule update --init --recursive
- bash: conan install -if _build -o with_tests=True -o shared=$(conan.shared) .
- bash: CONAN_CPU_COUNT=50 CTEST_OUTPUT_ON_FAILURE=1 conan build -bf _build -if _build . || true


@ -1,36 +0,0 @@
###############################################################################
#
# :: For systems that have pkg-config.
#
###############################################################################
find_package(PkgConfig REQUIRED)
find_library(NSL_LIBRARIES nsl )
find_library(RT_LIBRARIES rt )
find_library(SOCKET_LIBRARIES socket)
find_package(pthreads QUIET)
if(NOT TARGET PThreads4W::PThreads4W)
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads REQUIRED)
endif()
# For toxcore.
pkg_search_module(LIBSODIUM libsodium IMPORTED_TARGET REQUIRED)
if(MSVC)
find_package(unofficial-sodium REQUIRED)
endif()
# For toxav.
pkg_search_module(OPUS opus IMPORTED_TARGET)
if(NOT OPUS_FOUND)
pkg_search_module(OPUS Opus IMPORTED_TARGET)
endif()
pkg_search_module(VPX vpx IMPORTED_TARGET)
if(NOT VPX_FOUND)
pkg_search_module(VPX libvpx IMPORTED_TARGET)
endif()
# For tox-bootstrapd.
pkg_search_module(LIBCONFIG libconfig IMPORTED_TARGET)


@ -1,65 +0,0 @@
option(ENABLE_SHARED "Build shared (dynamic) libraries for all modules" ON)
option(ENABLE_STATIC "Build static libraries for all modules" ON)
if(NOT ENABLE_SHARED AND NOT ENABLE_STATIC)
message(WARNING
"Both static and shared libraries are disabled; "
"enabling only shared libraries. Use -DENABLE_SHARED or -DENABLE_STATIC to "
"select one manually.")
set(ENABLE_SHARED ON)
endif()
option(FULLY_STATIC "Build fully static executables" OFF)
if(FULLY_STATIC)
set(CMAKE_EXE_LINKER_FLAGS "-static -no-pie")
# remove -Wl,-Bdynamic
set(CMAKE_EXE_LINK_DYNAMIC_C_FLAGS)
set(CMAKE_EXE_LINK_DYNAMIC_CXX_FLAGS)
set(ENABLE_SHARED OFF)
set(ENABLE_STATIC ON)
endif()
function(install_module lib)
if(TARGET ${lib}_shared)
set_target_properties(${lib}_shared PROPERTIES
VERSION ${SOVERSION}
SOVERSION ${SOVERSION_MAJOR}
)
install(TARGETS ${lib}_shared
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})
endif()
if(TARGET ${lib}_static)
install(TARGETS ${lib}_static
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})
endif()
string(REPLACE ";" " " ${lib}_PKGCONFIG_LIBS "${${lib}_PKGCONFIG_LIBS}")
string(REPLACE ";" " " ${lib}_PKGCONFIG_REQUIRES "${${lib}_PKGCONFIG_REQUIRES}")
configure_file(
"${${lib}_SOURCE_DIR}/other/pkgconfig/${lib}.pc.in"
"${CMAKE_BINARY_DIR}/${lib}.pc"
@ONLY
)
configure_file(
"${toxcore_SOURCE_DIR}/other/rpm/${lib}.spec.in"
"${CMAKE_BINARY_DIR}/${lib}.spec"
@ONLY
)
install(FILES
${CMAKE_BINARY_DIR}/${lib}.pc
DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
foreach(sublib ${${lib}_API_HEADERS})
string(REPLACE "^" ";" sublib ${sublib})
list(GET sublib 0 header)
install(FILES ${header} ${ARGN})
endforeach()
endfunction()

20
external/CMakeLists.txt vendored Normal file
View File

@ -0,0 +1,20 @@
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)
add_subdirectory(./entt)
add_subdirectory(./solanaceae_util)
add_subdirectory(./solanaceae_contact)
add_subdirectory(./solanaceae_message3)
add_subdirectory(./solanaceae_plugin)
add_subdirectory(./toxcore)
add_subdirectory(./solanaceae_toxcore)
add_subdirectory(./solanaceae_tox)
add_subdirectory(./sdl)
add_subdirectory(./imgui)
add_subdirectory(./stb)
add_subdirectory(./libwebp)

4
external/entt/CMakeLists.txt vendored Normal file
View File

@ -0,0 +1,4 @@
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)
add_subdirectory(./entt EXCLUDE_FROM_ALL)

41
external/entt/entt/.clang-format vendored Normal file
View File

@ -0,0 +1,41 @@
BasedOnStyle: llvm
---
AccessModifierOffset: -4
AlignEscapedNewlines: DontAlign
AllowShortBlocksOnASingleLine: Empty
AllowShortEnumsOnASingleLine: true
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: WithoutElse
AllowShortLoopsOnASingleLine: true
AlwaysBreakTemplateDeclarations: Yes
BreakBeforeBinaryOperators: NonAssignment
BreakBeforeTernaryOperators: true
ColumnLimit: 0
DerivePointerAlignment: false
IncludeCategories:
- Regex: '<[[:alnum:]_]+>'
Priority: 1
- Regex: '<(gtest|gmock)/'
Priority: 2
- Regex: '<[[:alnum:]_./]+>'
Priority: 3
- Regex: '<entt/'
Priority: 4
- Regex: '.*'
Priority: 5
IndentPPDirectives: AfterHash
IndentWidth: 4
KeepEmptyLinesAtTheStartOfBlocks: false
Language: Cpp
PointerAlignment: Right
SpaceAfterCStyleCast: false
SpaceAfterTemplateKeyword: false
SpaceAroundPointerQualifiers: After
SpaceBeforeCaseColon: false
SpaceBeforeCtorInitializerColon: false
SpaceBeforeInheritanceColon: false
SpaceBeforeParens: Never
SpaceBeforeRangeBasedForLoopColon: false
Standard: Latest
TabWidth: 4
UseTab: Never

View File

@ -0,0 +1,4 @@
# These are supported funding model platforms
github: skypjack
custom: https://www.paypal.me/skypjack

View File

@ -0,0 +1,55 @@
name: analyzer
on:
push:
branches:
- master
- wip
jobs:
iwyu:
timeout-minutes: 30
env:
IWYU: 0.18
LLVM: 14
runs-on: ubuntu-latest
continue-on-error: true
steps:
- uses: actions/checkout@v2
- name: Install llvm/clang
# see: https://apt.llvm.org/
run: |
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
sudo add-apt-repository "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-$LLVM main"
sudo apt update
sudo apt remove -y "llvm*"
sudo apt remove -y "libclang-dev*"
sudo apt remove -y "clang*"
sudo apt install -y llvm-$LLVM-dev
sudo apt install -y libclang-$LLVM-dev
sudo apt install -y clang-$LLVM
- name: Compile iwyu
# see: https://github.com/include-what-you-use/include-what-you-use
working-directory: build
run: |
git clone https://github.com/include-what-you-use/include-what-you-use.git --branch $IWYU --depth 1
mkdir include-what-you-use/build
cd include-what-you-use/build
cmake -DCMAKE_C_COMPILER=clang-$LLVM -DCMAKE_CXX_COMPILER=clang++-$LLVM -DCMAKE_INSTALL_PREFIX=./ ..
make -j4
bin/include-what-you-use --version
- name: Compile tests
working-directory: build
run: |
export PATH=$PATH:${GITHUB_WORKSPACE}/build/include-what-you-use/build/bin
cmake -DENTT_BUILD_TESTING=ON \
-DENTT_BUILD_BENCHMARK=ON \
-DENTT_BUILD_EXAMPLE=ON \
-DENTT_BUILD_LIB=ON \
-DENTT_BUILD_SNAPSHOT=ON \
-DCMAKE_CXX_INCLUDE_WHAT_YOU_USE="include-what-you-use;-Xiwyu;--mapping_file=${GITHUB_WORKSPACE}/entt.imp;-Xiwyu;--no_fwd_decls;-Xiwyu;--verbose=1" ..
make -j4

View File

@ -0,0 +1,169 @@
name: build
on: [push, pull_request]
jobs:
linux:
timeout-minutes: 15
strategy:
matrix:
compiler:
- pkg: g++-7
exe: g++-7
- pkg: g++-8
exe: g++-8
- pkg: g++-9
exe: g++-9
- pkg: g++-10
exe: g++-10
- pkg: clang-8
exe: clang++-8
- pkg: clang-9
exe: clang++-9
- pkg: clang-10
exe: clang++-10
- pkg: clang-11
exe: clang++-11
- pkg: clang-12
exe: clang++-12
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install compiler
run: |
sudo apt update
sudo apt install -y ${{ matrix.compiler.pkg }}
- name: Compile tests
working-directory: build
env:
CXX: ${{ matrix.compiler.exe }}
run: |
cmake -DENTT_BUILD_TESTING=ON -DENTT_BUILD_LIB=ON -DENTT_BUILD_EXAMPLE=ON ..
make -j4
- name: Run tests
working-directory: build
env:
CTEST_OUTPUT_ON_FAILURE: 1
run: ctest --timeout 30 -C Debug -j4
linux-extra:
timeout-minutes: 15
strategy:
matrix:
compiler: [g++, clang++]
id_type: ["std::uint32_t", "std::uint64_t"]
cxx_std: [cxx_std_17, cxx_std_20]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Compile tests
working-directory: build
env:
CXX: ${{ matrix.compiler }}
run: |
cmake -DENTT_BUILD_TESTING=ON -DENTT_CXX_STD=${{ matrix.cxx_std }} -DENTT_ID_TYPE=${{ matrix.id_type }} ..
make -j4
- name: Run tests
working-directory: build
env:
CTEST_OUTPUT_ON_FAILURE: 1
run: ctest --timeout 30 -C Debug -j4
windows:
timeout-minutes: 15
strategy:
matrix:
toolset: [default, v141, v142, clang-cl]
include:
- toolset: v141
toolset_option: -T"v141"
- toolset: v142
toolset_option: -T"v142"
- toolset: clang-cl
toolset_option: -T"ClangCl"
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Compile tests
working-directory: build
run: |
cmake -DENTT_BUILD_TESTING=ON -DENTT_BUILD_LIB=ON -DENTT_BUILD_EXAMPLE=ON ${{ matrix.toolset_option }} ..
cmake --build . -j 4
- name: Run tests
working-directory: build
env:
CTEST_OUTPUT_ON_FAILURE: 1
run: ctest --timeout 30 -C Debug -j4
windows-extra:
timeout-minutes: 15
strategy:
matrix:
id_type: ["std::uint32_t", "std::uint64_t"]
cxx_std: [cxx_std_17, cxx_std_20]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Compile tests
working-directory: build
run: |
cmake -DENTT_BUILD_TESTING=ON -DENTT_CXX_STD=${{ matrix.cxx_std }} -DENTT_ID_TYPE=${{ matrix.id_type }} ..
cmake --build . -j 4
- name: Run tests
working-directory: build
env:
CTEST_OUTPUT_ON_FAILURE: 1
run: ctest --timeout 30 -C Debug -j4
macos:
timeout-minutes: 15
runs-on: macOS-latest
steps:
- uses: actions/checkout@v2
- name: Compile tests
working-directory: build
run: |
cmake -DENTT_BUILD_TESTING=ON -DENTT_BUILD_LIB=ON -DENTT_BUILD_EXAMPLE=ON ..
make -j4
- name: Run tests
working-directory: build
env:
CTEST_OUTPUT_ON_FAILURE: 1
run: ctest --timeout 30 -C Debug -j4
macos-extra:
timeout-minutes: 15
strategy:
matrix:
id_type: ["std::uint32_t", "std::uint64_t"]
cxx_std: [cxx_std_17, cxx_std_20]
runs-on: macOS-latest
steps:
- uses: actions/checkout@v2
- name: Compile tests
working-directory: build
run: |
cmake -DENTT_BUILD_TESTING=ON -DENTT_CXX_STD=${{ matrix.cxx_std }} -DENTT_ID_TYPE=${{ matrix.id_type }} ..
make -j4
- name: Run tests
working-directory: build
env:
CTEST_OUTPUT_ON_FAILURE: 1
run: ctest --timeout 30 -C Debug -j4

View File

@ -0,0 +1,38 @@
name: coverage
on: [push, pull_request]
jobs:
codecov:
timeout-minutes: 15
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Compile tests
working-directory: build
env:
CXXFLAGS: "--coverage -fno-inline"
CXX: g++
run: |
cmake -DENTT_BUILD_TESTING=ON -DENTT_BUILD_LIB=ON -DENTT_BUILD_EXAMPLE=ON ..
make -j4
- name: Run tests
working-directory: build
env:
CTEST_OUTPUT_ON_FAILURE: 1
run: ctest --timeout 30 -C Debug -j4
- name: Collect data
working-directory: build
run: |
sudo apt install lcov
lcov -c -d . -o coverage.info
lcov -l coverage.info
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v2
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: build/coverage.info
name: EnTT
fail_ci_if_error: true

View File

@ -0,0 +1,39 @@
name: deploy
on:
release:
types: published
jobs:
homebrew-entt:
timeout-minutes: 5
runs-on: ubuntu-latest
env:
GH_REPO: homebrew-entt
FORMULA: entt.rb
steps:
- uses: actions/checkout@v2
- name: Clone repository
working-directory: build
env:
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
run: git clone https://$GITHUB_ACTOR:$PERSONAL_ACCESS_TOKEN@github.com/$GITHUB_ACTOR/$GH_REPO.git
- name: Prepare formula
working-directory: build
run: |
cd $GH_REPO
curl "https://github.com/${{ github.repository }}/archive/${{ github.ref }}.tar.gz" --location --fail --silent --show-error --output archive.tar.gz
sed -i -e '/url/s/".*"/"'$(echo "https://github.com/${{ github.repository }}/archive/${{ github.ref }}.tar.gz" | sed -e 's/[\/&]/\\&/g')'"/' $FORMULA
sed -i -e '/sha256/s/".*"/"'$(openssl sha256 archive.tar.gz | cut -d " " -f 2)'"/' $FORMULA
- name: Update remote
working-directory: build
run: |
cd $GH_REPO
git config --local user.email "action@github.com"
git config --local user.name "$GITHUB_ACTOR"
git add $FORMULA
git commit -m "Update to ${{ github.ref }}"
git push origin master

View File

@ -0,0 +1,31 @@
name: sanitizer
on: [push, pull_request]
jobs:
clang:
timeout-minutes: 15
strategy:
matrix:
compiler: [clang++]
id_type: ["std::uint32_t", "std::uint64_t"]
cxx_std: [cxx_std_17, cxx_std_20]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Compile tests
working-directory: build
env:
CXX: ${{ matrix.compiler }}
run: |
cmake -DENTT_USE_SANITIZER=ON -DENTT_BUILD_TESTING=ON -DENTT_BUILD_LIB=ON -DENTT_BUILD_EXAMPLE=ON -DENTT_CXX_STD=${{ matrix.cxx_std }} -DENTT_ID_TYPE=${{ matrix.id_type }} ..
make -j4
- name: Run tests
working-directory: build
env:
CTEST_OUTPUT_ON_FAILURE: 1
run: ctest --timeout 30 -C Debug -j4

13
external/entt/entt/.gitignore vendored Normal file
View File

@ -0,0 +1,13 @@
# Conan
conan/test_package/build
# IDEs
*.user
.idea
.vscode
.vs
CMakeSettings.json
cpp.hint
# Bazel
/bazel-*

56
external/entt/entt/AUTHORS vendored Normal file
View File

@ -0,0 +1,56 @@
# Author
skypjack
# Contributors
alexames
BenediktConze
bjadamson
ceeac
ColinH
corystegel
Croydon
cschreib
cugone
dbacchet
dBagrat
djarek
DNKpp
DonKult
drglove
eliasdaler
erez-o
eugeneko
gale83
ghost
grdowns
Green-Sky
Innokentiy-Alaytsev
Kerndog73
Koward
Lawrencemm
markand
mhammerc
Milerius
Minimonium
morbo84
m-waka
netpoetica
NixAJ
Oortonaut
Paolo-Oliverio
pgruenbacher
prowolf
Qix-
stefanofiorentino
suVrik
szunhammer
The5-1
vblanco20-1
willtunnels
WizardIke
WoLfulus
w1th0utnam3
xissburg
zaucy

14
external/entt/entt/BUILD.bazel vendored Normal file
View File

@ -0,0 +1,14 @@
_msvc_copts = ["/std:c++17"]
_gcc_copts = ["-std=c++17"]
cc_library(
name = "entt",
visibility = ["//visibility:public"],
strip_include_prefix = "src",
hdrs = glob(["src/**/*.h", "src/**/*.hpp"]),
copts = select({
"@bazel_tools//src/conditions:windows": _msvc_copts,
"@bazel_tools//src/conditions:windows_msvc": _msvc_copts,
"//conditions:default": _gcc_copts,
}),
)

325
external/entt/entt/CMakeLists.txt vendored Normal file
View File

@ -0,0 +1,325 @@
#
# EnTT
#
cmake_minimum_required(VERSION 3.12.4)
#
# Read project version
#
set(ENTT_VERSION_REGEX "#define ENTT_VERSION_.*[ \t]+(.+)")
file(STRINGS "${CMAKE_CURRENT_SOURCE_DIR}/src/entt/config/version.h" ENTT_VERSION REGEX ${ENTT_VERSION_REGEX})
list(TRANSFORM ENTT_VERSION REPLACE ${ENTT_VERSION_REGEX} "\\1")
string(JOIN "." ENTT_VERSION ${ENTT_VERSION})
#
# Project configuration
#
project(
EnTT
VERSION ${ENTT_VERSION}
DESCRIPTION "Gaming meets modern C++ - a fast and reliable entity-component system (ECS) and much more"
HOMEPAGE_URL "https://github.com/skypjack/entt"
LANGUAGES C CXX
)
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Debug)
endif()
message(VERBOSE "*")
message(VERBOSE "* ${PROJECT_NAME} v${PROJECT_VERSION} (${CMAKE_BUILD_TYPE})")
message(VERBOSE "* Copyright (c) 2017-2022 Michele Caini <michele.caini@gmail.com>")
message(VERBOSE "*")
#
# CMake stuff
#
list(INSERT CMAKE_MODULE_PATH 0 ${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules)
#
# Compiler stuff
#
option(ENTT_USE_LIBCPP "Use libc++ by adding -stdlib=libc++ flag if available." OFF)
option(ENTT_USE_SANITIZER "Enable sanitizers by adding -fsanitize=address -fno-omit-frame-pointer -fsanitize=undefined flags if available." OFF)
if(ENTT_USE_LIBCPP)
if(NOT WIN32)
include(CheckCXXSourceCompiles)
include(CMakePushCheckState)
cmake_push_check_state()
set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -stdlib=libc++")
check_cxx_source_compiles("
#include<type_traits>
int main() { return std::is_same_v<int, char>; }
" ENTT_HAS_LIBCPP)
cmake_pop_check_state()
endif()
if(NOT ENTT_HAS_LIBCPP)
message(VERBOSE "The option ENTT_USE_LIBCPP is set but libc++ is not available. The flag will not be added to the target.")
endif()
endif()
if(ENTT_USE_SANITIZER)
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang|GNU")
set(ENTT_HAS_SANITIZER TRUE CACHE BOOL "" FORCE)
mark_as_advanced(ENTT_HAS_SANITIZER)
endif()
if(NOT ENTT_HAS_SANITIZER)
message(VERBOSE "The option ENTT_USE_SANITIZER is set but sanitizer support is not available. The flags will not be added to the target.")
endif()
endif()
#
# Add EnTT target
#
option(ENTT_INCLUDE_HEADERS "Add all EnTT headers to the EnTT target." OFF)
option(ENTT_INCLUDE_NATVIS "Add EnTT natvis files to the EnTT target." OFF)
if(ENTT_INCLUDE_NATVIS)
if(MSVC)
set(ENTT_HAS_NATVIS TRUE CACHE BOOL "" FORCE)
mark_as_advanced(ENTT_HAS_NATVIS)
endif()
if(NOT ENTT_HAS_NATVIS)
message(VERBOSE "The option ENTT_INCLUDE_NATVIS is set but natvis files are not supported. They will not be added to the target.")
endif()
endif()
include(GNUInstallDirs)
add_library(EnTT INTERFACE)
add_library(EnTT::EnTT ALIAS EnTT)
target_include_directories(
EnTT
INTERFACE
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src>
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
)
target_compile_features(EnTT INTERFACE cxx_std_17)
if(ENTT_INCLUDE_HEADERS)
target_sources(
EnTT
INTERFACE
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/config/config.h>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/config/macro.h>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/config/version.h>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/container/dense_map.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/container/dense_set.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/container/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/algorithm.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/any.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/attribute.h>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/compressed_pair.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/enum.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/family.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/hashed_string.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/ident.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/iterator.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/memory.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/monostate.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/tuple.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/type_info.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/type_traits.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/core/utility.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/component.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/entity.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/group.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/handle.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/helper.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/observer.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/organizer.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/registry.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/runtime_view.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/snapshot.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/sparse_set.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/storage.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/storage_mixin.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entity/view.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/graph/adjacency_matrix.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/graph/dot.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/graph/flow.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/graph/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/locator/locator.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/adl_pointer.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/container.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/context.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/factory.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/meta.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/node.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/pointer.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/policy.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/range.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/resolve.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/template.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/type_traits.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/meta/utility.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/platform/android-ndk-r17.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/poly/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/poly/poly.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/process/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/process/process.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/process/scheduler.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/resource/cache.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/resource/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/resource/loader.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/resource/resource.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/signal/delegate.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/signal/dispatcher.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/signal/emitter.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/signal/fwd.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/signal/sigh.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/entt.hpp>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/src/entt/fwd.hpp>
)
endif()
if(ENTT_HAS_NATVIS)
target_sources(
EnTT
INTERFACE
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/config.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/container.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/core.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/entity.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/graph.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/locator.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/meta.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/platform.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/poly.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/process.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/resource.natvis>
$<BUILD_INTERFACE:${EnTT_SOURCE_DIR}/natvis/entt/signal.natvis>
)
endif()
if(ENTT_HAS_SANITIZER)
target_compile_options(EnTT INTERFACE $<$<CONFIG:Debug>:-fsanitize=address -fno-omit-frame-pointer -fsanitize=undefined>)
target_link_libraries(EnTT INTERFACE $<$<CONFIG:Debug>:-fsanitize=address -fno-omit-frame-pointer -fsanitize=undefined>)
endif()
if(ENTT_HAS_LIBCPP)
target_compile_options(EnTT BEFORE INTERFACE -stdlib=libc++)
endif()
#
# Install pkg-config file
#
include(JoinPaths)
set(EnTT_PKGCONFIG ${CMAKE_CURRENT_BINARY_DIR}/entt.pc)
join_paths(EnTT_PKGCONFIG_INCLUDEDIR "\${prefix}" "${CMAKE_INSTALL_INCLUDEDIR}")
configure_file(
${EnTT_SOURCE_DIR}/cmake/in/entt.pc.in
${EnTT_PKGCONFIG}
@ONLY
)
install(
FILES ${EnTT_PKGCONFIG}
DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig
)
#
# Install EnTT
#
include(CMakePackageConfigHelpers)
install(
TARGETS EnTT
EXPORT EnTTTargets
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
)
write_basic_package_version_file(
EnTTConfigVersion.cmake
VERSION ${PROJECT_VERSION}
COMPATIBILITY AnyNewerVersion
)
configure_package_config_file(
${EnTT_SOURCE_DIR}/cmake/in/EnTTConfig.cmake.in
EnTTConfig.cmake
INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/EnTT/cmake
)
export(
EXPORT EnTTTargets
FILE ${CMAKE_CURRENT_BINARY_DIR}/EnTTTargets.cmake
NAMESPACE EnTT::
)
install(
EXPORT EnTTTargets
FILE EnTTTargets.cmake
DESTINATION ${CMAKE_INSTALL_LIBDIR}/EnTT/cmake
NAMESPACE EnTT::
)
install(
FILES
${PROJECT_BINARY_DIR}/EnTTConfig.cmake
${PROJECT_BINARY_DIR}/EnTTConfigVersion.cmake
DESTINATION ${CMAKE_INSTALL_LIBDIR}/EnTT/cmake
)
install(DIRECTORY src/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
export(PACKAGE EnTT)
#
# Tests
#
option(ENTT_BUILD_TESTING "Enable building tests." OFF)
if(ENTT_BUILD_TESTING)
option(ENTT_FIND_GTEST_PACKAGE "Enable finding gtest package." OFF)
option(ENTT_BUILD_BENCHMARK "Build benchmark." OFF)
option(ENTT_BUILD_EXAMPLE "Build examples." OFF)
option(ENTT_BUILD_LIB "Build lib tests." OFF)
option(ENTT_BUILD_SNAPSHOT "Build snapshot test with Cereal." OFF)
set(ENTT_ID_TYPE std::uint32_t CACHE STRING "Type of identifiers to use for the tests")
set(ENTT_CXX_STD cxx_std_17 CACHE STRING "C++ standard revision to use for the tests")
include(CTest)
enable_testing()
add_subdirectory(test)
endif()
#
# Documentation
#
option(ENTT_BUILD_DOCS "Enable building with documentation." OFF)
if(ENTT_BUILD_DOCS)
find_package(Doxygen 1.8)
if(DOXYGEN_FOUND)
add_subdirectory(docs)
endif()
endif()

43
external/entt/entt/CONTRIBUTING.md vendored Normal file
View File

@ -0,0 +1,43 @@
# Contributing
First of all, thank you very much for taking the time to contribute to the
`EnTT` library.<br/>
How to do it mostly depends on the type of contribution:
* If you have a question, **please** ensure there isn't already an answer for
you by searching on GitHub under
[issues](https://github.com/skypjack/entt/issues). Do not forget to search
also through the closed ones. If you are unable to find a proper answer, feel
free to [open a new issue](https://github.com/skypjack/entt/issues/new).
Usually, questions are marked as such and closed in a few days.
* If you want to fix a typo in the inline documentation or in the README file,
if you want to add some new sections or if you want to help me with the
language by reviewing what I wrote so far (I'm not a native speaker after
all), **please** open a new
[pull request](https://github.com/skypjack/entt/pulls) with your changes.
* If you found a bug, **please** ensure there isn't already an answer for you by
searching on GitHub under [issues](https://github.com/skypjack/entt/issues).
If you are unable to find an open issue addressing the problem, feel free to
[open a new one](https://github.com/skypjack/entt/issues/new). **Please**, do
not forget to carefully describe how to reproduce the problem, then add all
the information about the system on which you are experiencing it and point
out the version of `EnTT` you are using (tag or commit).
* If you found a bug and you wrote a patch to fix it, open a new
[pull request](https://github.com/skypjack/entt/pulls) with your code.
**Please**, add some tests to avoid regressions in the future if possible; it
would be really appreciated. Note that the `EnTT` library has
[coverage at 100%](https://coveralls.io/github/skypjack/entt?branch=master)
(at least it was at 100% at the time I wrote this file), which is why you can be
confident about using it in a production environment.
* If you want to propose a new feature and you know how to code it, **please**
do not open a pull request directly. Before doing so,
[create a new issue](https://github.com/skypjack/entt/issues/new) to discuss
your proposal. Other users could be interested in your idea and the discussion
that will follow can refine it and therefore give us a better solution.
* If you want to request a new feature, I'm available for hiring. Take a look at
[my profile](https://github.com/skypjack) and feel free to write me.

21
external/entt/entt/LICENSE vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2017-2022 Michele Caini, author of EnTT
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

431
external/entt/entt/README.md vendored Normal file
View File

@ -0,0 +1,431 @@
![EnTT: Gaming meets modern C++](https://user-images.githubusercontent.com/1812216/103550016-90752280-4ea8-11eb-8667-12ed2219e137.png)
<!--
@cond TURN_OFF_DOXYGEN
-->
[![Build Status](https://github.com/skypjack/entt/workflows/build/badge.svg)](https://github.com/skypjack/entt/actions)
[![Coverage](https://codecov.io/gh/skypjack/entt/branch/master/graph/badge.svg)](https://codecov.io/gh/skypjack/entt)
[![Try online](https://img.shields.io/badge/try-online-brightgreen)](https://godbolt.org/z/zxW73f)
[![Documentation](https://img.shields.io/badge/docs-docsforge-blue)](http://entt.docsforge.com/)
[![Gitter chat](https://badges.gitter.im/skypjack/entt.png)](https://gitter.im/skypjack/entt)
[![Discord channel](https://img.shields.io/discord/707607951396962417?logo=discord)](https://discord.gg/5BjPWBd)
[![Donate](https://img.shields.io/badge/donate-paypal-blue.svg)](https://www.paypal.me/skypjack)
> `EnTT` has been a dream so far, we haven't found a single bug to date and it's
> super easy to work with
>
> -- Every EnTT User Ever
`EnTT` is a header-only, tiny and easy to use library for game programming and
much more written in **modern C++**.<br/>
[Among others](https://github.com/skypjack/entt/wiki/EnTT-in-Action), it's used
in [**Minecraft**](https://minecraft.net/en-us/attribution/) by Mojang, the
[**ArcGIS Runtime SDKs**](https://developers.arcgis.com/arcgis-runtime/) by Esri
and the amazing [**Ragdoll**](https://ragdolldynamics.com/).<br/>
If you don't see your project in the list, please open an issue, submit a PR or
add the [#entt](https://github.com/topics/entt) tag to your _topics_! :+1:
---
Do you want to **keep up with changes** or do you have a **question** that
doesn't require you to open an issue?<br/>
Join the [gitter channel](https://gitter.im/skypjack/entt) and the
[discord server](https://discord.gg/5BjPWBd), meet other users like you. The
more we are, the better for everyone.<br/>
Don't forget to check the
[FAQs](https://github.com/skypjack/entt/wiki/Frequently-Asked-Questions) and the
[wiki](https://github.com/skypjack/entt/wiki) too. Your answers may already be
there.
Do you want to support `EnTT`? Consider becoming a
[**sponsor**](https://github.com/users/skypjack/sponsorship).
Many thanks to [these people](https://skypjack.github.io/sponsorship/) and
**special** thanks to:
[![mojang](https://user-images.githubusercontent.com/1812216/106253145-67ca1980-6217-11eb-9c0b-d93561b37098.png)](https://mojang.com)
[![imgly](https://user-images.githubusercontent.com/1812216/106253726-271ed000-6218-11eb-98e0-c9c681925770.png)](https://img.ly/)
# Table of Contents
* [Introduction](#introduction)
* [Code Example](#code-example)
* [Motivation](#motivation)
* [Performance](#performance)
* [Integration](#integration)
* [Requirements](#requirements)
* [CMake](#cmake)
* [Natvis support](#natvis-support)
* [Packaging Tools](#packaging-tools)
* [pkg-config](#pkg-config)
* [Documentation](#documentation)
* [Tests](#tests)
* [EnTT in Action](#entt-in-action)
* [Contributors](#contributors)
* [License](#license)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
The entity-component-system (also known as _ECS_) is an architectural pattern
used mostly in game development. For further details:
* [Entity Systems Wiki](http://entity-systems.wikidot.com/)
* [Evolve Your Hierarchy](http://cowboyprogramming.com/2007/01/05/evolve-your-heirachy/)
* [ECS on Wikipedia](https://en.wikipedia.org/wiki/Entity%E2%80%93component%E2%80%93system)
This project started off as a pure entity-component system. Over time the
codebase has grown as more and more classes and functionalities were added.<br/>
Here is a brief, yet incomplete list of what it offers today:
* Built-in **RTTI system** mostly similar to the standard one.
* A `constexpr` utility for human-readable **resource names**.
* Minimal **configuration system** built using the monostate pattern.
* Incredibly fast **entity-component system** with its own _pay for what you
use_ policy, unconstrained component types with optional pointer stability and
hooks for storage customization.
* Views and groups to iterate entities and components and allow different access
patterns, from **perfect SoA** to fully random.
* A lot of **facilities** built on top of the entity-component system to help
the users and avoid reinventing the wheel.
* General purpose **execution graph builder** for optimal scheduling.
* The smallest and most basic implementation of a **service locator** ever seen.
* A built-in, non-intrusive and macro-free runtime **reflection system**.
* **Static polymorphism** made simple and within everyone's reach.
* A few homemade containers, like a sparse set based **hash map**.
* A **cooperative scheduler** for processes of any type.
* All that is needed for **resource management** (cache, loaders, handles).
* Delegates, **signal handlers** and a tiny event dispatcher.
* A general purpose **event emitter** as a CRTP idiom based class template.
* And **much more**! Check out the
[**wiki**](https://github.com/skypjack/entt/wiki).
Consider this list a work in progress as well as the project. The whole API is
fully documented in-code for those who are brave enough to read it.<br/>
Please, do note that all tools are also DLL-friendly now and run smoothly across
boundaries.
One thing known to most is that `EnTT` is also used in **Minecraft**.<br/>
Given that the game is available literally everywhere, I can confidently say
that the library has been sufficiently tested on every platform that can come to
mind.
## Code Example
```cpp
#include <entt/entt.hpp>
struct position {
float x;
float y;
};
struct velocity {
float dx;
float dy;
};
void update(entt::registry &registry) {
auto view = registry.view<const position, velocity>();
// use a callback
view.each([](const auto &pos, auto &vel) { /* ... */ });
// use an extended callback
view.each([](const auto entity, const auto &pos, auto &vel) { /* ... */ });
// use a range-for
for(auto [entity, pos, vel]: view.each()) {
// ...
}
// use forward iterators and get only the components of interest
for(auto entity: view) {
auto &vel = view.get<velocity>(entity);
// ...
}
}
int main() {
entt::registry registry;
for(auto i = 0u; i < 10u; ++i) {
const auto entity = registry.create();
registry.emplace<position>(entity, i * 1.f, i * 1.f);
if(i % 2 == 0) { registry.emplace<velocity>(entity, i * .1f, i * .1f); }
}
update(registry);
}
```
## Motivation
I started developing `EnTT` for the _wrong_ reason: my goal was to design an
entity-component system to beat another well known open source library both in
terms of performance and possibly memory usage.<br/>
In the end, I did it, but it wasn't very satisfying. Actually it wasn't
satisfying at all. The fastest and nothing more, fairly little indeed. When I
realized it, I tried hard to keep intact the great performance of `EnTT` and to
add all the features I wanted to see in *my own library* at the same time.
Nowadays, `EnTT` is finally what I was looking for: still faster than its
_competitors_, lower memory usage in the average case, a really good API and an
amazing set of features. And even more, of course.
## Performance
The proposed entity-component system is incredibly fast to iterate entities and
components; this is a fact. Some compilers make a lot of optimizations because
of how `EnTT` works, some others aren't that good. In general, if we consider
real world cases, `EnTT` is somewhere between a bit and much faster than many of
the other solutions around, although I couldn't check them all for obvious
reasons.
If you are interested, you can compile the `benchmark` test in release mode (to
enable compiler optimizations, otherwise it would make little sense) by setting
the `ENTT_BUILD_BENCHMARK` option of `CMake` to `ON`, then evaluate for yourself
whether you're satisfied with the results or not.
Honestly I got tired of updating the README file whenever there is an
improvement.<br/>
There are already a lot of projects out there that use `EnTT` as a basis for
comparison (this should already tell you a lot). Many of these benchmarks are
completely wrong, many others are simply incomplete, good at omitting some
information and using the wrong function to compare a given feature. Certainly
there are also good ones but they age quickly if nobody updates them, especially
when the library they are dealing with is actively developed.
The choice to use `EnTT` should be based on its carefully designed API, its
set of features and the general performance, **not** because some single
benchmark shows it to be the fastest tool available.
In the future I'll likely try to get even better performance while still adding
new features, mainly for fun.<br/>
If you want to contribute and/or have suggestions, feel free to make a PR or
open an issue to discuss your idea.
# Integration
`EnTT` is a header-only library. This means that including the `entt.hpp` header
is enough to include the library as a whole and use it. For those who are
interested only in the entity-component system, consider to include the sole
`entity/registry.hpp` header instead.<br/>
It's a matter of adding the following line to the top of a file:
```cpp
#include <entt/entt.hpp>
```
Use the line below to include only the entity-component system instead:
```cpp
#include <entt/entity/registry.hpp>
```
Then pass the proper `-I` argument to the compiler to add the `src` directory to
the include paths.
## Requirements
To be able to use `EnTT`, users must provide a full-featured compiler that
supports at least C++17.<br/>
The requirements below are mandatory to compile the tests and to extract the
documentation:
* `CMake` version 3.7 or later.
* `Doxygen` version 1.8 or later.
Alternatively, [Bazel](https://bazel.build) is also supported as a build system
(credits to [zaucy](https://github.com/zaucy) who offered to maintain it).<br/>
In the documentation below I'll still refer to `CMake`, this being the official
build system of the library.
## CMake
To use `EnTT` from a `CMake` project, just link an existing target to the
`EnTT::EnTT` alias.<br/>
The library offers everything you need for locating (as in `find_package`),
embedding (as in `add_subdirectory`), fetching (as in `FetchContent`) or using
it in many of the ways that you can think of and that involve `CMake`.<br/>
Covering all possible cases would require a treatise and not a simple README file,
but I'm confident that anyone reading this section also knows what it's about
and can use `EnTT` from a `CMake` project without problems.
## Natvis support
When using `CMake`, just enable the option `ENTT_INCLUDE_NATVIS` and enjoy
it.<br/>
Otherwise, most of the tools are covered via Natvis and all files can be found
in the `natvis` directory, divided by module.<br/>
If you spot errors or have suggestions, any contribution is welcome!
## Packaging Tools
`EnTT` is available for some of the most known packaging tools. In particular:
* [`Conan`](https://github.com/conan-io/conan-center-index), the C/C++ Package
Manager for Developers.
* [`vcpkg`](https://github.com/Microsoft/vcpkg), Microsoft VC++ Packaging
Tool.<br/>
You can download and install `EnTT` in just a few simple steps:
```
$ git clone https://github.com/Microsoft/vcpkg.git
$ cd vcpkg
$ ./bootstrap-vcpkg.sh
$ ./vcpkg integrate install
$ vcpkg install entt
```
Or you can use the `experimental` feature to test the latest changes:
```
vcpkg install entt[experimental] --head
```
The `EnTT` port in `vcpkg` is kept up to date by Microsoft team members and
community contributors.<br/>
If the version is out of date, please
[create an issue or pull request](https://github.com/Microsoft/vcpkg) on the
`vcpkg` repository.
* [`Homebrew`](https://github.com/skypjack/homebrew-entt), the missing package
manager for macOS.<br/>
Available as a homebrew formula. Just type the following to install it:
```
brew install skypjack/entt/entt
```
* [`build2`](https://build2.org), build toolchain for developing and packaging C
and C++ code.<br/>
In order to use the [`entt`](https://cppget.org/entt) package in a `build2`
project, add the following line or a similar one to the `manifest` file:
```
depends: entt ^3.0.0
```
Also check that the configuration refers to a valid repository, so that the
package can be found by `build2`:
* [`cppget.org`](https://cppget.org), the open-source community central
repository, accessible as `https://pkg.cppget.org/1/stable`.
* [Package source repository](https://github.com/build2-packaging/entt):
accessible as either `https://github.com/build2-packaging/entt.git` or
`ssh://git@github.com/build2-packaging/entt.git`.
Feel free to [report issues](https://github.com/build2-packaging/entt) with
this package.
Both can be used with `bpkg add-repo` or added in a project
`repositories.manifest`. See the official
[documentation](https://build2.org/build2-toolchain/doc/build2-toolchain-intro.xhtml#guide-repositories)
for more details.
Consider this list a work in progress and help me to make it longer if you like.
## pkg-config
`EnTT` also supports `pkg-config` (for some definition of _supports_ at least).
A suitable file called `entt.pc` is generated and installed in a proper
directory when running `CMake`.<br/>
This should also make it easier to use with tools such as `Meson` or similar.
# Documentation
The documentation is based on [doxygen](http://www.doxygen.nl/). To build it:
$ cd build
$ cmake .. -DENTT_BUILD_DOCS=ON
$ make
The API reference will be created in HTML format within the directory
`build/docs/html`. To navigate it with your favorite browser:
$ cd build
$ your_favorite_browser docs/html/index.html
<!--
@cond TURN_OFF_DOXYGEN
-->
The same version is also available [online](https://skypjack.github.io/entt/)
for the latest release, that is the last stable tag. If you are looking for
something more pleasing to the eye, consider reading the nice-looking version
available on [docsforge](https://entt.docsforge.com/): same documentation, much
more pleasant to read.<br/>
Moreover, there exists a [wiki](https://github.com/skypjack/entt/wiki) dedicated
to the project where users can find all related documentation pages.
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Tests
To compile and run the tests, `EnTT` requires *googletest*.<br/>
`cmake` will download and compile the library before compiling anything else.
In order to build the tests, set the `CMake` option `ENTT_BUILD_TESTING` to
`ON`.
To build the most basic set of tests:
* `$ cd build`
* `$ cmake -DENTT_BUILD_TESTING=ON ..`
* `$ make`
* `$ make test`
Note that benchmarks are not part of this set.
<!--
@cond TURN_OFF_DOXYGEN
-->
# EnTT in Action
`EnTT` is widely used in private and commercial applications. I cannot even
mention most of them because of some documents I signed some time
ago. Fortunately, there are also people who took the time to implement open
source projects based on `EnTT` and did not hold back when it came to
documenting them.
[Here](https://github.com/skypjack/entt/wiki/EnTT-in-Action) you can find an
incomplete list of games, applications and articles that can be used as a
reference.
If you know of other resources out there that are about `EnTT`, feel free to
open an issue or a PR and I'll be glad to add them to the list.
# Contributors
Requests for features, PRs, suggestions and feedback are highly appreciated.
If you find you can help and want to contribute to the project with your
experience or you do want to get part of the project for some other reason, feel
free to contact me directly (you can find the mail in the
[profile](https://github.com/skypjack)).<br/>
I can't promise that each and every contribution will be accepted, but I can
assure that I'll do my best to take them all as soon as possible.
If you decide to participate, please see the guidelines for
[contributing](CONTRIBUTING.md) before creating issues or pull
requests.<br/>
Take also a look at the
[contributors list](https://github.com/skypjack/entt/blob/master/AUTHORS) to
know who has participated so far.
<!--
@endcond TURN_OFF_DOXYGEN
-->
# License
Code and documentation Copyright (c) 2017-2022 Michele Caini.<br/>
Colorful logo Copyright (c) 2018-2021 Richard Caseres.
Code released under
[the MIT license](https://github.com/skypjack/entt/blob/master/LICENSE).<br/>
Documentation released under
[CC BY 4.0](https://creativecommons.org/licenses/by/4.0/).<br/>
All logos released under
[CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/).

27
external/entt/entt/TODO vendored Normal file
View File

@ -0,0 +1,27 @@
* debugging tools (#60): the issue online already contains interesting tips on this, look at it
* work stealing job system (see #100) + mt scheduler based on const awareness for types
EXAMPLES
* filter on runtime values/variables (not only types)
* support to polymorphic types (see #859)
DOC:
* storage<void>
* custom storage/view
* examples (and credits) from @alanjfs :)
* update entity doc when the storage based model is in place
TODO (high prio):
* remove the static storage from the const assure in the registry
WIP:
* get rid of observers, storage based views made them pointless - document alternatives
* add storage getter for filters to views and groups
* exploit the tombstone mechanism to allow enabling/disabling entities (see bump, compact and clear for further details)
* basic_storage::bind for cross-registry setups (see and remove todo from entity_copy.cpp)
* process scheduler: reviews, use free lists internally
* dedicated entity storage, in-place O(1) release/destroy for non-orphaned entities, out-of-sync model
* entity-only and exclude-only views (both solved with entity storage and storage<void>)
* custom allocators all over (registry, ...)
* add test for maximum number of entities reached
* deprecate non-owning groups in favor of owning views and view packs, introduce lazy owning views

1
external/entt/entt/WORKSPACE vendored Normal file
View File

@ -0,0 +1 @@
workspace(name = "com_github_skypjack_entt")

2
external/entt/entt/build/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
*
!.gitignore

View File

@ -0,0 +1,5 @@
@PACKAGE_INIT@
set(EnTT_VERSION "@PROJECT_VERSION@")
include("${CMAKE_CURRENT_LIST_DIR}/EnTTTargets.cmake")
check_required_components("@PROJECT_NAME@")

View File

@ -0,0 +1,8 @@
prefix=@CMAKE_INSTALL_PREFIX@
includedir=@EnTT_PKGCONFIG_INCLUDEDIR@
Name: EnTT
Description: Gaming meets modern C++
Url: https://github.com/skypjack/entt
Version: @ENTT_VERSION@
Cflags: -I${includedir}

View File

@ -0,0 +1,23 @@
# This module provides function for joining paths
# known from most languages
#
# SPDX-License-Identifier: (MIT OR CC0-1.0)
# Copyright 2020 Jan Tojnar
# https://github.com/jtojnar/cmake-snips
#
# Modelled after Python's os.path.join
# https://docs.python.org/3.7/library/os.path.html#os.path.join
# Windows not supported
function(join_paths joined_path first_path_segment)
set(temp_path "${first_path_segment}")
foreach(current_segment IN LISTS ARGN)
if(NOT ("${current_segment}" STREQUAL ""))
if(IS_ABSOLUTE "${current_segment}")
set(temp_path "${current_segment}")
else()
set(temp_path "${temp_path}/${current_segment}")
endif()
endif()
endforeach()
set(${joined_path} "${temp_path}" PARENT_SCOPE)
endfunction()

37
external/entt/entt/conan/build.py vendored Normal file
View File

@ -0,0 +1,37 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from cpt.packager import ConanMultiPackager
import os
if __name__ == "__main__":
username = os.getenv("GITHUB_ACTOR")
tag_version = os.getenv("GITHUB_REF")
tag_package = os.getenv("GITHUB_REPOSITORY")
login_username = os.getenv("CONAN_LOGIN_USERNAME")
package_version = tag_version.replace("refs/tags/v", "")
package_name = tag_package.replace("skypjack/", "")
reference = "{}/{}".format(package_name, package_version)
channel = os.getenv("CONAN_CHANNEL", "stable")
upload = os.getenv("CONAN_UPLOAD")
stable_branch_pattern = os.getenv("CONAN_STABLE_BRANCH_PATTERN", r"v\d+\.\d+\.\d+.*")
test_folder = os.getenv("CPT_TEST_FOLDER", os.path.join("conan", "test_package"))
upload_only_when_stable = os.getenv("CONAN_UPLOAD_ONLY_WHEN_STABLE", True)
disable_shared = os.getenv("CONAN_DISABLE_SHARED_BUILD", "False")
builder = ConanMultiPackager(username=username,
reference=reference,
channel=channel,
login_username=login_username,
upload=upload,
stable_branch_pattern=stable_branch_pattern,
upload_only_when_stable=upload_only_when_stable,
test_folder=test_folder)
builder.add()
filtered_builds = []
for settings, options, env_vars, build_requires, reference in builder.items:
if disable_shared == "False" or not options["{}:shared".format(package_name)]:
filtered_builds.append([settings, options, env_vars, build_requires])
builder.builds = filtered_builds
builder.run()

7
external/entt/entt/conan/ci/build.sh vendored Normal file
View File

@ -0,0 +1,7 @@
#!/bin/bash
set -e
set -x
conan user
python conan/build.py

View File

@ -0,0 +1,6 @@
#!/bin/bash
set -e
set -x
pip install -U conan_package_tools conan

View File

@ -0,0 +1,13 @@
cmake_minimum_required(VERSION 3.7.2)
project(test_package)
set(CMAKE_VERBOSE_MAKEFILE TRUE)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake)
conan_basic_setup()
add_executable(${PROJECT_NAME} test_package.cpp)
target_link_libraries(${PROJECT_NAME} ${CONAN_LIBS})

View File

@ -0,0 +1,19 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
import os
class TestPackageConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
generators = "cmake"
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def test(self):
bin_path = os.path.join("bin", "test_package")
self.run(bin_path, run_environment=True)

View File

@ -0,0 +1,56 @@
#include <entt/entt.hpp>
#include <cstdint>
struct position {
float x;
float y;
};
struct velocity {
float dx;
float dy;
};
void update(entt::registry &registry) {
auto view = registry.view<position, velocity>();
for(auto entity: view) {
// gets only the components that are going to be used ...
auto &vel = view.get<velocity>(entity);
vel.dx = 0.;
vel.dy = 0.;
// ...
}
}
void update(std::uint64_t dt, entt::registry &registry) {
registry.view<position, velocity>().each([dt](auto &pos, auto &vel) {
// gets all the components of the view at once ...
pos.x += vel.dx * dt;
pos.y += vel.dy * dt;
// ...
});
}
int main() {
entt::registry registry;
std::uint64_t dt = 16;
for(auto i = 0; i < 10; ++i) {
auto entity = registry.create();
registry.emplace<position>(entity, i * 1.f, i * 1.f);
if(i % 2 == 0) { registry.emplace<velocity>(entity, i * .1f, i * .1f); }
}
update(dt, registry);
update(registry);
// ...
return EXIT_SUCCESS;
}

27
external/entt/entt/conanfile.py vendored Normal file
View File

@ -0,0 +1,27 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile
class EnttConan(ConanFile):
name = "entt"
description = "Gaming meets modern C++ - a fast and reliable entity-component system (ECS) and much more "
topics = ("conan," "entt", "gaming", "entity", "ecs")
url = "https://github.com/skypjack/entt"
homepage = url
author = "Michele Caini <michele.caini@gmail.com>"
license = "MIT"
exports = ["LICENSE"]
exports_sources = ["src/*"]
no_copy_source = True
def package(self):
self.copy(pattern="LICENSE", dst="licenses")
self.copy(pattern="*", dst="include", src="src", keep_path=True)
def package_info(self):
if not self.in_local_cache:
self.cpp_info.includedirs = ["src"]
def package_id(self):
self.info.header_only()

40
external/entt/entt/docs/CMakeLists.txt vendored Normal file
View File

@ -0,0 +1,40 @@
#
# Doxygen configuration (documentation)
#
set(DOXY_DEPS_DIRECTORY ${EnTT_SOURCE_DIR}/deps)
set(DOXY_SOURCE_DIRECTORY ${EnTT_SOURCE_DIR}/src)
set(DOXY_DOCS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
set(DOXY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
configure_file(doxy.in doxy.cfg @ONLY)
add_custom_target(
docs ALL
COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/doxy.cfg
WORKING_DIRECTORY ${EnTT_SOURCE_DIR}
VERBATIM
SOURCES
dox/extra.dox
md/config.md
md/container.md
md/core.md
md/entity.md
md/faq.md
md/lib.md
md/links.md
md/locator.md
md/meta.md
md/poly.md
md/process.md
md/reference.md
md/resource.md
md/signal.md
md/unreal.md
doxy.in
)
install(
DIRECTORY ${DOXY_OUTPUT_DIRECTORY}/html
DESTINATION share/${PROJECT_NAME}-${PROJECT_VERSION}/
)

5
external/entt/entt/docs/dox/extra.dox vendored Normal file
View File

@ -0,0 +1,5 @@
/**
* @namespace entt
*
* @brief `EnTT` default namespace.
*/

2682
external/entt/entt/docs/doxy.in vendored Normal file

File diff suppressed because it is too large

121
external/entt/entt/docs/md/config.md vendored Normal file
View File

@ -0,0 +1,121 @@
# Crash Course: configuration
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [Definitions](#definitions)
* [ENTT_NOEXCEPTION](#entt_noexception)
* [ENTT_USE_ATOMIC](#entt_use_atomic)
* [ENTT_ID_TYPE](#entt_id_type)
* [ENTT_SPARSE_PAGE](#entt_sparse_page)
* [ENTT_PACKED_PAGE](#entt_packed_page)
* [ENTT_ASSERT](#entt_assert)
* [ENTT_ASSERT_CONSTEXPR](#entt_assert_constexpr)
* [ENTT_DISABLE_ASSERT](#entt_disable_assert)
* [ENTT_NO_ETO](#entt_no_eto)
* [ENTT_STANDARD_CPP](#entt_standard_cpp)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
`EnTT` has become almost completely customizable over time, in many
respects. These variables are just one of the many ways to customize how it
works.<br/>
In the vast majority of cases, users will have no interest in changing the
default parameters. For all other cases, the list of possible configurations
with which it's possible to adjust the behavior of the library at compile time
can be found below.
# Definitions
All options are intended as parameters to the compiler (or user-defined macros
within the compilation units, if preferred).<br/>
Each parameter can result in internal library definitions. It's not recommended
to try to also modify these definitions, since there is no guarantee that they
will remain stable over time unlike the options below.
## ENTT_NOEXCEPTION
Define this variable without assigning any value to it to turn off exception
handling in `EnTT`.<br/>
This is roughly equivalent to setting the compiler flag `-fno-exceptions` but is
also limited to this library only.
## ENTT_USE_ATOMIC
In general, `EnTT` doesn't offer primitives to support multi-threading. Many of
the features can be split over multiple threads without any explicit control and
the user is the one who knows if a synchronization point is required.<br/>
However, some features aren't easily accessible to users and are made
thread-safe by means of this definition.
## ENTT_ID_TYPE
`entt::id_type` is directly controlled by this definition and widely used within
the library.<br/>
By default, its type is `std::uint32_t`. However, users can define a different
default type if necessary.
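As an illustration only (a minimal sketch, not taken from the library itself), a
single compilation unit can switch to 64-bit identifiers by defining the macro
before any `EnTT` header; project-wide builds would pass the same definition to
the compiler instead so that all translation units agree:
```cpp
// Hypothetical translation unit: the macro is defined before any EnTT header,
// so entt::id_type becomes std::uint64_t here instead of the default.
#include <cstdint>
#include <type_traits>

#define ENTT_ID_TYPE std::uint64_t
#include <entt/entt.hpp>

static_assert(std::is_same_v<entt::id_type, std::uint64_t>);

int main() {
    entt::registry registry;
    const auto entity = registry.create();
    (void)entity;
}
```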
## ENTT_SPARSE_PAGE
It's known that the ECS module of `EnTT` is based on _sparse sets_. What is less
known perhaps is that the sparse arrays are paged to reduce memory usage.<br/>
Default size of pages (that is, the number of elements they contain) is 4096 but
users can adjust it if appropriate. In all cases, the chosen value **must** be a
power of 2.
## ENTT_PACKED_PAGE
As it happens with sparse arrays, packed arrays are also paginated. However, in
this case the aim isn't to reduce memory usage but to have pointer stability
upon component creation.<br/>
Default size of pages (that is, the number of elements they contain) is 1024 but
users can adjust it if appropriate. In all cases, the chosen value **must** be a
power of 2.
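Purely as a sketch (the values below are hypothetical, chosen only to satisfy the
power-of-two requirement), both page sizes can be overridden the same way, before
any `EnTT` header is included:
```cpp
// Hypothetical page sizes, both powers of two, replacing the defaults of
// 4096 (sparse) and 1024 (packed); they must be visible before EnTT headers.
#define ENTT_SPARSE_PAGE 2048
#define ENTT_PACKED_PAGE 512
#include <entt/entt.hpp>

int main() {
    entt::registry registry;
    const auto entity = registry.create();
    (void)entity;
}
```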
## ENTT_ASSERT
For performance reasons, `EnTT` doesn't use exceptions or any other control
structures. In fact, it offers many features that result in undefined behavior
if not used correctly.<br/>
To get around this, the library relies on a lot of asserts for the purpose of
detecting errors in debug builds. By default, it uses `assert` internally. Users
are allowed to overwrite its behavior by setting this variable.
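As a rough sketch (assuming the two-argument `ENTT_ASSERT(condition, msg)` form
used by recent versions of the library), a custom handler that logs and aborts
could look like this:
```cpp
// Hypothetical replacement for the default assert()-based handler; it must be
// defined before any EnTT header is included so the library picks it up.
#include <cstdio>
#include <cstdlib>

#define ENTT_ASSERT(condition, msg) \
    do { \
        if(!(condition)) { \
            std::fprintf(stderr, "EnTT assertion failed: %s\n", msg); \
            std::abort(); \
        } \
    } while(false)

#include <entt/entt.hpp>

int main() {
    entt::registry registry;
    const auto entity = registry.create();
    registry.destroy(entity); // internal sanity checks now go through the macro above
}
```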
### ENTT_ASSERT_CONSTEXPR
Usually, an assert within a `constexpr` function isn't a big deal. However, in
case of extreme customizations, it might be useful to differentiate.<br/>
For this purpose, `EnTT` introduces an admittedly badly named variable to make
the job easier in this regard. By default, this variable forwards its arguments
to `ENTT_ASSERT`.
### ENTT_DISABLE_ASSERT
Assertions may in turn affect performance to an extent when enabled. Whether
`ENTT_ASSERT` and `ENTT_ASSERT_CONSTEXPR` are redefined or not, all asserts can
be disabled at once by means of this definition.<br/>
Note that `ENTT_DISABLE_ASSERT` takes precedence over the redefinition of the
other variables and is therefore meant to disable all controls no matter what.
## ENTT_NO_ETO
In order to reduce memory consumption and increase performance, empty types are
never instantiated nor stored by the ECS module of `EnTT`.<br/>
Use this variable to treat these types like all others and therefore to create a
dedicated storage for them.
## ENTT_STANDARD_CPP
`EnTT` mixes non-standard language features with others that are perfectly
compliant to offer some of its functionalities.<br/>
This definition prevents the library from using non-standard techniques, that
is, functionalities that aren't fully compliant with the standard C++.<br/>
While there are no known portability issues at the time of this writing, this
should make the library fully portable anyway if needed.

67
external/entt/entt/docs/md/container.md vendored Normal file
View File

@ -0,0 +1,67 @@
# Crash Course: containers
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [Containers](#containers)
* [Dense map](#dense-map)
* [Dense set](#dense-set)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
The standard C++ library offers a wide range of containers and it's really
difficult to do better (although it's very easy to do worse, as many examples
available online demonstrate).<br/>
`EnTT` doesn't try in any way to replace what is offered by the standard. Quite
the opposite, given the widespread use that is made of standard containers.<br/>
However, the library also tries to fill a gap in features and functionality by
making available some containers initially developed for internal use.
This section of the library is likely to grow larger over time. However, for the
moment it's quite small and mainly aimed at satisfying some internal needs.<br/>
For all containers made available, full test coverage and stability over time is
guaranteed as usual.
# Containers
## Dense map
The dense map made available in `EnTT` is a hash map that aims to return a
packed array of elements, so as to reduce the number of jumps in memory during
iterations.<br/>
The implementation is based on _sparse sets_ and each bucket is identified by an
implicit list within the packed array itself.
The interface is very close to its counterpart in the standard library, that is,
`std::unordered_map`.<br/>
However, both local and non-local iterators returned by a dense map belong to
the input iterator category although they respectively model the concepts of a
_forward iterator_ type and a _random access iterator_ type.<br/>
This is because they return a pair of references rather than a reference to a
pair. In other words, dense maps return a so-called _proxy iterator_, the value
type of which is:
* `std::pair<const Key &, Type &>` for non-const iterator types.
* `std::pair<const Key &, const Type &>` for const iterator types.
This is quite different from what any standard library map returns and should be
taken into account when looking for a drop-in replacement.
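As a minimal sketch of what this means in practice (the single-include header and the `std::unordered_map`-like insertion functions are assumed), structured bindings work as usual, while code relying on a reference to a pair may need small adjustments:
```cpp
#include <entt/entt.hpp>
#include <string>

int main() {
    entt::dense_map<int, std::string> map{};
    map.emplace(0, "zero");
    map.emplace(1, "one");

    // the proxy iterator yields a pair of references, not a reference to a pair
    for(auto [key, value]: map) {
        value += "!";
    }
}
```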
## Dense set
The dense set made available in `EnTT` is a hash set that aims to return a
packed array of elements, so as to reduce the number of jumps in memory during
iterations.<br/>
The implementation is based on _sparse sets_ and each bucket is identified by an
implicit list within the packed array itself.
The interface is in all respects similar to its counterpart in the standard
library, that is, `std::unordered_set`.<br/>
Therefore, there is no need to go into the API description.
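For completeness, a minimal usage sketch (again assuming the single-include header and the `std::unordered_set`-like API):
```cpp
#include <entt/entt.hpp>

int main() {
    entt::dense_set<int> set{};
    set.insert(3);

    // lookup and removal work as with std::unordered_set
    if(set.find(3) != set.end()) {
        set.erase(3);
    }
}
```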

1011
external/entt/entt/docs/md/core.md vendored Normal file

File diff suppressed because it is too large

2252
external/entt/entt/docs/md/entity.md vendored Normal file

File diff suppressed because it is too large

215
external/entt/entt/docs/md/faq.md vendored Normal file

@ -0,0 +1,215 @@
# Frequently Asked Questions
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [FAQ](#faq)
* [Why is my debug build on Windows so slow?](#why-is-my-debug-build-on-windows-so-slow)
* [How can I represent hierarchies with my components?](#how-can-i-represent-hierarchies-with-my-components)
* [Custom entity identifiers: yay or nay?](#custom-entity-identifiers-yay-or-nay)
* [Warning C4307: integral constant overflow](#warning-C4307-integral-constant-overflow)
* [Warning C4003: the min, the max and the macro](#warning-C4003-the-min-the-max-and-the-macro)
* [The standard and the non-copyable types](#the-standard-and-the-non-copyable-types)
* [Which functions trigger which signals](#which-functions-trigger-which-signals)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
This is a constantly updated section where I'm trying to put the answers to the
most frequently asked questions.<br/>
If you don't find your answer here, there are two cases: nobody has asked that
question yet or this section needs updating. In both cases, you can
[open a new issue](https://github.com/skypjack/entt/issues/new) or enter either
the [gitter channel](https://gitter.im/skypjack/entt) or the
[discord server](https://discord.gg/5BjPWBd) to ask for help.<br/>
Probably someone already has an answer for you and we can then integrate this
part of the documentation.
# FAQ
## Why is my debug build on Windows so slow?
`EnTT` is an experimental project that I also use to keep me up-to-date with the
latest revision of the language and the standard library. For this reason, it's
likely that some classes you're working with are using standard containers under
the hood.<br/>
Unfortunately, it's well known that the standard containers don't perform
particularly well in debug builds (the reasons for this go beyond this document)
and apparently even less so on Windows. Fortunately, this can be mitigated
considerably, achieving good results in many cases.
First of all, there are two things to do in a Windows project:
* Disable the [`/JMC`](https://docs.microsoft.com/cpp/build/reference/jmc)
option (_Just My Code_ debugging), available starting with Visual Studio 2017
version 15.8.
* Set the [`_ITERATOR_DEBUG_LEVEL`](https://docs.microsoft.com/cpp/standard-library/iterator-debug-level)
macro to 0. This will disable checked iterators and iterator debugging.
Moreover, set the `ENTT_DISABLE_ASSERT` variable or redefine the `ENTT_ASSERT`
macro to disable internal debug checks in `EnTT`:
```cpp
#define ENTT_ASSERT(...) ((void)0)
```
These asserts are introduced to help users, but they require access to the
underlying containers and can therefore hurt performance in some cases.
With these changes, debug performance should increase enough in most cases. If
you want something more, you can also switch to optimization level `O0` or,
preferably, `O1`.
## How can I represent hierarchies with my components?
This is one of the first questions that anyone asks when starting to work with
the entity-component-system architectural pattern.<br/>
There are several approaches to the problem and the best one depends mainly on
the actual problem one is facing. In all cases, the solution doesn't strictly
depend on the library in use, though the latter certainly makes some techniques
easier or harder depending on how the data are laid out.
I tried to describe some of the approaches that fit well with the model of
`EnTT`. [This](https://skypjack.github.io/2019-06-25-ecs-baf-part-4/) is the
first post of a series that tries to _explore_ the problem. More will probably
come in the future.<br/>
In addition, `EnTT` also offers the possibility of creating stable storage types
and therefore having pointer stability for one, some or all components. This is by
far the most convenient solution when it comes to creating hierarchies and
whatnot. See the documentation for the ECS part of the library and in particular
what concerns the `component_traits` class for further details.
## Custom entity identifiers: yay or nay?
Custom entity identifiers are definitely a good idea in two cases at least:
* If `std::uint32_t` isn't large enough for your purposes, since this is the
underlying type of `entt::entity`.
* If you want to avoid conflicts when using multiple registries.
Identifiers can be defined through enum classes and class types that define an
`entity_type` member of type `std::uint32_t` or `std::uint64_t`.<br/>
In fact, this is a definition equivalent to that of `entt::entity`:
```cpp
enum class entity: std::uint32_t {};
```
There is no limit to the number of identifiers that can be defined.
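As a sketch, a custom identifier is then used by instantiating `entt::basic_registry` with it (the `my_entity` name below is illustrative):
```cpp
#include <cstdint>
#include <entt/entt.hpp>

// Custom identifier with a 64-bit underlying type.
enum class my_entity: std::uint64_t {};

int main() {
    entt::basic_registry<my_entity> registry{};
    const my_entity entity = registry.create();
    (void)entity;
}
```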
## Warning C4307: integral constant overflow
According to [this](https://github.com/skypjack/entt/issues/121) issue, using a
hashed string under VS (toolset v141) could generate a warning.<br/>
First of all, I want to reassure you: it's expected and harmless. However, it
can be annoying.
To suppress it without also suppressing all the other warnings,
here is a workaround in the form of a macro:
```cpp
#if defined(_MSC_VER)
#define HS(str) __pragma(warning(suppress:4307)) entt::hashed_string{str}
#else
#define HS(str) entt::hashed_string{str}
#endif
```
With an example of use included:
```cpp
constexpr auto identifier = HS("my/resource/identifier");
```
Thanks to [huwpascoe](https://github.com/huwpascoe) for the courtesy.
## Warning C4003: the min, the max and the macro
On Windows, a header file defines two macros `min` and `max` which may result in
conflicts with their counterparts in the standard library and therefore in
errors during compilation.
It's a pretty big problem but fortunately it's not a problem of `EnTT` itself and there
is a fairly simple solution to it.<br/>
It consists in defining the `NOMINMAX` macro before including any other header
so as to get rid of the extra definitions:
```cpp
#define NOMINMAX
```
Please refer to [this](https://github.com/skypjack/entt/issues/96) issue for
more details.
## The standard and the non-copyable types
`EnTT` internally uses the trait `std::is_copy_constructible_v` to check whether
a component is copyable. However, this trait doesn't really check whether a type
is actually copyable. Instead, it just checks that a suitable copy constructor
and copy assignment operator exist.<br/>
This can lead to surprising results due to some idiosyncrasies of the standard.
For example, `std::vector` defines a copy constructor that is conditionally
enabled depending on whether the value type is copyable or not. As a result,
`std::is_copy_constructible_v` returns true for the following specialization:
```cpp
struct type {
std::vector<std::unique_ptr<action>> vec;
};
```
However, the copy constructor is effectively unusable for such a specialization.
Therefore, trying to assign an instance of this type to an entity may trigger a
compilation error.<br/>
As a workaround, users can mark the type explicitly as non-copyable. This also
suppresses the implicit generation of the move constructor and operator, which
will therefore have to be defaulted accordingly:
```cpp
struct type {
type(const type &) = delete;
type(type &&) = default;
type & operator=(const type &) = delete;
type & operator=(type &&) = default;
std::vector<std::unique_ptr<action>> vec;
};
```
Note that aggregate initialization is also disabled as a consequence.<br/>
Fortunately, this type of trick is quite rare. The bad news is that there is no
way to deal with it at the library level, this being due to the design of the
language. On the other hand, the fact that the language itself also offers a way
to mitigate the problem makes it manageable.
## Which functions trigger which signals
The `registry` class offers three signals that are emitted following specific
operations. Maybe not everyone knows what these operations are, though.<br/>
If this isn't clear, below you can find a _vademecum_ for this purpose:
* `on_construct` is invoked when a component is first added (neither modified nor
replaced) to an entity.
* `on_update` is called whenever an existing component is modified or replaced.
* `on_destroy` is called when a component is explicitly or implicitly removed
from an entity.
Among the most debated functions are `emplace_or_replace` and
`destroy`. However, following the above rules, it's quite simple to know what
will happen.<br/>
In the first case, `on_construct` is invoked if the entity doesn't already own
the component, otherwise the latter is replaced and therefore `on_update` is
triggered. As for the second case, components are removed from their entities
and thus freed when the latter are recycled. This means that `on_destroy` is
triggered for every component owned by the entity that is destroyed.
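A minimal sketch of the three signals in action (the `position` component and the listener are illustrative):
```cpp
#include <entt/entt.hpp>

struct position {
    float x;
    float y;
};

void position_created(entt::registry &, entt::entity) {
    // invoked whenever a position is first added to an entity
}

int main() {
    entt::registry registry{};
    registry.on_construct<position>().connect<&position_created>();

    const auto entity = registry.create();
    registry.emplace<position>(entity, 0.f, 0.f);  // triggers on_construct
    registry.replace<position>(entity, 1.f, 1.f);  // triggers on_update
    registry.destroy(entity);                      // triggers on_destroy
}
```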

299
external/entt/entt/docs/md/graph.md vendored Normal file

@ -0,0 +1,299 @@
# Crash Course: graph
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [Data structures](#data-structures)
* [Adjacency matrix](#adjacency-matrix)
* [Graphviz dot language](#graphviz-dot-language)
* [Flow builder](#flow-builder)
* [Tasks and resources](#tasks-and-resources)
* [Fake resources and order of execution](#fake-resources-and-order-of-execution)
* [Sync points](#sync-points)
* [Execution graph](#execution-graph)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
`EnTT` doesn't aim to offer everything one needs to work with graphs. Therefore,
anyone looking for this in the _graph_ submodule will be disappointed.<br/>
Quite the opposite is true. This submodule is minimal and contains only the data
structures and algorithms strictly necessary for the development of some tools
such as the _flow builder_.
# Data structures
As anticipated in the introduction, the aim isn't to offer all possible data
structures suitable for representing and working with graphs. Many will likely
be added or refined over time; however, I want to discourage anyone from
expecting a tight schedule on the subject.<br/>
The data structures presented in this section are mainly useful for the
development and support of some tools which are also part of the same submodule.
## Adjacency matrix
The adjacency matrix is designed to represent either a directed or an undirected
graph:
```cpp
entt::adjacency_matrix<entt::directed_tag> adjacency_matrix{};
```
The `directed_tag` type _creates_ the graph as directed. There is also an
`undirected_tag` counterpart which creates it as undirected.<br/>
The interface deviates slightly from the typical double indexing of C and offers
an API that is perhaps more familiar to a C++ programmer. Therefore, elements
are accessed and modified via the `contains`, `insert` and `erase` functions
rather than a double call to `operator[]`:
```cpp
if(adjacency_matrix.contains(0u, 1u)) {
    adjacency_matrix.erase(0u, 1u);
} else {
    adjacency_matrix.insert(0u, 1u);
}
```
Both `insert` and `erase` are idempotent functions which have no effect if the
element already exists or has already been deleted.<br/>
The first one returns an `std::pair` containing the iterator to the element and
a boolean value indicating whether the element has been inserted or was already
present. The second one instead returns the number of deleted elements (0 or 1).
An adjacency matrix must be initialized with the number of elements (vertices)
when constructing it but can also be resized later using the `resize` function:
```cpp
entt::adjacency_matrix<entt::directed_tag> adjacency_matrix{3u};
```
To visit all vertices, the class offers a function named `vertices` that returns
an iterable object suitable for the purpose:
```cpp
for(auto &&vertex: adjacency_matrix.vertices()) {
    // ...
}
```
Note that the same result can be obtained with the following snippet, since the
vertices are unsigned integral values:
```cpp
for(std::size_t pos{}, last = adjacency_matrix.size(); pos < last; ++pos) {
    // ...
}
```
As for visiting the edges, a few functions are available.<br/>
When the purpose is to visit all the edges of a given adjacency matrix, the
`edges` function returns an iterable object that can be used to get them as
pairs of vertices:
```cpp
for(auto [lhs, rhs]: adjacency_matrix.edges()) {
    // ...
}
```
On the other hand, if the goal is to visit all the in- or out-edges of a given
vertex, the `in_edges` and `out_edges` functions are meant for that:
```cpp
for(auto [lhs, rhs]: adjacency_matrix.out_edges(3u)) {
    // ...
}
```
As might be expected, these functions expect the vertex to visit (that is, to
return the in- or out-edges for) as an argument.<br/>
Finally, the adjacency matrix is an allocator-aware container and offers most of
the functionality one would expect from this type of container, such as `clear`,
`get_allocator` and so on.
## Graphviz dot language
As it's one of the most popular formats, the library offers minimal support for
converting a graph to a Graphviz dot snippet.<br/>
The simplest way is to pass both an output stream and a graph to the `dot`
function:
```cpp
std::ostringstream output{};
entt::dot(output, adjacency_matrix);
```
However, there is also the option of providing a callback to which the vertices
are passed one at a time and which can be used to add (`dot`) properties to the
output:
```cpp
std::ostringstream output{};
entt::dot(output, adjacency_matrix, [](auto &output, auto vertex) {
out << "label=\"v\"" << vertex << ",shape=\"box\"";
});
```
This second mode is particularly convenient when the user wants to associate
externally managed data with the graph being converted.
# Flow builder
A flow builder is used to create execution graphs from tasks and resources.<br/>
The implementation is as generic as possible and doesn't bind to any other part
of the library.
This class is designed as a sort of _state machine_: a specific task is attached
to it, then the resources that the task accesses in read-only or read-write mode
are specified.<br/>
Most of the functions in the API also return the flow builder itself, as is
customary for builder classes.
Once all tasks have been registered and resources assigned to them, an execution
graph in the form of an adjacency matrix is returned to the user.<br/>
This graph contains all the tasks assigned to the flow builder in the form of
_vertices_. The _vertex_ itself can be used as an index to get the identifier
passed during registration.
## Tasks and resources
Although these terms are used extensively in the documentation, the flow builder
has no real concept of tasks and resources.<br/>
This class works mainly with _identifiers_, that is, values of type `id_type`:
both tasks and resources are identified by integral values.<br/>
This avoids coupling the class itself to the rest of the library or to any
particular data structure. On the other hand, it requires the user to keep track
of the association between identifiers and operations or actual data.
Once a flow builder has been created (which requires no constructor arguments),
the first thing to do is to bind a task. This tells the builder which task
intends to consume the resources that are specified immediately after:
```cpp
entt::flow builder{};
builder.bind("task_1"_hs);
```
Note that the example uses the `EnTT` hashed string to generate an identifier
for the task.<br/>
Indeed, the use of `id_type` as an identifier type is not by accident. In fact,
it matches well with the internal hashed string class. Moreover, it's also the
same type returned by the hash function of the internal RTTI system, in case the
user wants to rely on that.<br/>
However, being an integral value, it leaves users full freedom to rely on their
own tools if they deem it necessary.
Once a task has been associated with the flow builder, it can be assigned
read-only or read-write resources, as appropriate:
```cpp
builder
    .bind("task_1"_hs)
    .ro("resource_1"_hs)
    .ro("resource_2"_hs)
    .bind("task_2"_hs)
    .rw("resource_2"_hs);
```
As mentioned, many functions return the builder itself and it's therefore easy
to chain the different calls.<br/>
Resources too are identified by numeric values of type `id_type`. As above, the
choice isn't random: it goes well with the tools offered by the library while
leaving room for maximum flexibility.
Finally, both the `ro` and `rw` functions also offer an overload that accepts a
pair of iterators, so that one can pass a range of resources in one go.
## Fake resources and order of execution
The flow builder doesn't offer the ability to specify when a task should execute
before or after another task.<br/>
In fact, the order of _registration_ on the resources also determines the order
in which the tasks are processed during the generation of the execution graph.
However, there is a way to force the execution order of two processes.<br/>
Briefly, since accessing a resource in opposite modes requires sequential rather
than parallel scheduling, it's possible to make use of fake resources to force
the order of execution:
```cpp
builder
    .bind("task_1"_hs)
    .ro("resource_1"_hs)
    .rw("fake"_hs)
    .bind("task_2"_hs)
    .ro("resource_2"_hs)
    .ro("fake"_hs)
    .bind("task_3"_hs)
    .ro("resource_2"_hs)
    .ro("fake"_hs);
```
This snippet forces the execution of `task_2` and `task_3` **after** `task_1`.
This is due to the fact that the latter sets a read-write requirement on a fake
resource that the other tasks also want to access in read-only mode.<br/>
Similarly, it's possible to force a task to run after a certain group:
```cpp
builder
    .bind("task_1"_hs)
    .ro("resource_1"_hs)
    .ro("fake"_hs)
    .bind("task_2"_hs)
    .ro("resource_1"_hs)
    .ro("fake"_hs)
    .bind("task_3"_hs)
    .ro("resource_2"_hs)
    .rw("fake"_hs);
```
In this case, since there are a number of processes that want to read a specific
resource, they will do so in parallel, forcing `task_3` to run after all the
other tasks.
## Sync points
Sometimes it's useful to assign the role of _sync point_ to a node.<br/>
Whether it accesses new resources or is simply a watershed, the procedure for
assigning this role to a vertex is always the same: first it's tied to the flow
builder, then the `sync` function is invoked:
```cpp
builder.bind("sync_point"_hs).sync();
```
The choice to assign an _identity_ to this type of node lies in the fact that,
more often than not, it also performs operations on resources.<br/>
If this isn't the case, it's still possible to create no-op vertices to which
empty tasks are assigned.
## Execution graph
Once both the resources and their consumers have been properly registered, the
purpose of this tool is to generate an execution graph that takes into account
all specified constraints to return the best scheduling for the vertices:
```cpp
entt::adjacency_matrix<entt::directed_tag> graph = builder.graph();
```
The search for the main vertices, that is, those without in-edges, is usually
the first step:
```cpp
for(auto &&vertex: graph.vertices()) {
    if(auto in_edges = graph.in_edges(vertex); in_edges.begin() == in_edges.end()) {
        // ...
    }
}
```
Starting from them, using the other functions appropriately (such as `out_edges`
to retrieve the children of a given task or `edges` to access their identifiers),
it's possible to instantiate an execution graph.
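Putting the previous snippets together, a sketch of such a traversal might look like this (using only the `vertices`, `in_edges` and `out_edges` functions shown above):
```cpp
for(auto &&vertex: graph.vertices()) {
    if(auto in_edges = graph.in_edges(vertex); in_edges.begin() == in_edges.end()) {
        // vertex is a root task: schedule it first, then visit its children
        for(auto [parent, child]: graph.out_edges(vertex)) {
            // child must run after parent
        }
    }
}
```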

99
external/entt/entt/docs/md/lib.md vendored Normal file

@ -0,0 +1,99 @@
# Push EnTT across boundaries
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Working across boundaries](#working-across-boundaries)
* [Smooth until proven otherwise](#smooth-until-proven-otherwise)
* [Meta context](#meta-context)
* [Memory management](#memory-management)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Working across boundaries
`EnTT` has historically had a limit when used across boundaries on Windows in
general and on GNU/Linux when default visibility was set to hidden. The
limitation was mainly due to a custom utility used to assign unique, sequential
identifiers to different types.<br/>
Fortunately, nowadays using `EnTT` across boundaries is much easier.
## Smooth until proven otherwise
Many classes in `EnTT` make extensive use of type erasure for their purposes.
This isn't a problem in itself (in fact, it's the basis of a very convenient
API). However, a way is needed to recognize, on the other side of a boundary,
objects whose type has been erased.<br/>
The `type_hash` class template is how identifiers are generated and thus made
available to the rest of the library. In general, this class doesn't arouse much
interest. The only exception is when a conflict between identifiers occurs
(definitely uncommon though) or when the default solution proposed by `EnTT`
isn't suitable for the user's purposes.<br/>
The section dedicated to `type_info` contains all the details to get around the
issue in a concise and elegant way. Please refer to the specific documentation.
When working with linked libraries, compile definitions `ENTT_API_EXPORT` and
`ENTT_API_IMPORT` can be used where there is a need to import or export symbols,
so as to make everything work nicely across boundaries.<br/>
On the other hand, everything should run smoothly when working with plugins or
shared libraries that don't export any symbols.
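A sketch of the export side, assuming the definitions aren't already set by the build system (the import side uses `ENTT_API_IMPORT` instead):
```cpp
// In the translation units of the library that owns the EnTT symbols.
#define ENTT_API_EXPORT
#include <entt/entt.hpp>
```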
For anyone who needs more details, the test suite contains multiple examples
covering the most common cases (see the `lib` directory for all details).<br/>
It goes without saying that it's impossible to cover **all** possible cases.
However, what is offered should hopefully serve as a basis for all of them.
## Meta context
The runtime reflection system deserves a special mention when it comes to using
it across boundaries.<br/>
Since the system is already linked to a static context to which the elements are
attached, and different contexts don't know about each other, the context must
be _shared_ to allow the use of meta types across boundaries.
Fortunately, sharing a context is also trivial to do. First of all, the local
one is acquired in the main space:
```cpp
auto handle = entt::locator<entt::meta_ctx>::handle();
```
Then, it's passed to the receiving space that sets it as its default context,
thus discarding or storing aside the local one:
```cpp
entt::locator<entt::meta_ctx>::reset(handle);
```
From now on, both spaces refer to the same context and all new meta types are
attached to it, no matter where they are created.<br/>
Note that resetting the main context doesn't propagate the change across
boundaries. In other words, resetting a context results in the decoupling of the
two sides and therefore a divergence in their contents.
## Memory Management
There is another subtle problem due to memory management that can lead to
headaches.<br/>
It can occur where there are pools of objects (such as components or events)
dynamically created on demand. This is usually not a problem when working with
linked libraries that rely on the same dynamic runtime. However, it can occur in
the case of plugins or statically linked runtimes.
As an example, imagine creating an instance of `registry` in the main executable
and sharing it with a plugin. If the latter starts working with a component that
is unknown to the former, a dedicated pool is created within the registry on
first use.<br/>
As one can guess, this pool is instantiated on a different side of the boundary
from the `registry`. Therefore, the instance is now managing memory from
different spaces and this can quickly lead to crashes if not properly addressed.
To overcome the risk, it's recommended to use well-defined interfaces that make
fundamental types pass through the boundaries, isolating the instances of the
`EnTT` classes as appropriate.<br/>
Refer to the test suite for some examples, read the documentation available
online about this type of issue, or consult someone who has already dealt with
such problems.

266
external/entt/entt/docs/md/links.md vendored Normal file

@ -0,0 +1,266 @@
# EnTT in Action
`EnTT` is widely used in private and commercial applications. I cannot even
mention most of them because of agreements I signed some time
ago. Fortunately, there are also people who took the time to implement open
source projects based on `EnTT` and didn't hold back when it came to documenting
them.
Below is an incomplete list of games, applications and articles that can be used
as a reference. Where I use the word _apparently_, it means that the use of
`EnTT` is documented but the authors didn't make explicit announcements or
contact me directly.
I hope this list can grow much more in the future:
* Games:
* [Minecraft](https://minecraft.net/en-us/attribution/) by
[Mojang](https://mojang.com/): of course, **that** Minecraft, see the
open source attributions page for more details.
* [Minecraft Earth](https://www.minecraft.net/en-us/about-earth) by
[Mojang](https://mojang.com/): an augmented reality game for mobile, that
lets users bring Minecraft into the real world.
* [Ember Sword](https://embersword.com/): a modern Free-to-Play MMORPG with a
player-driven economy, a classless combat system, and scarce, tradable
cosmetic collectibles.
* Apparently [Diablo II: Resurrected](https://diablo2.blizzard.com/) by
[Blizzard](https://www.blizzard.com/): monsters, heroes, items, spells, all
resurrected. Thanks unknown insider.
* [Apparently](https://www.youtube.com/watch?v=P8xvOA3ikrQ&t=1105s)
[Call of Duty: Vanguard](https://www.callofduty.com/vanguard) by
[Sledgehammer Games](https://www.sledgehammergames.com/): I can neither
confirm nor deny but there is a license I know in the credits.
* Apparently [D&D Dark Alliance](https://darkalliance.wizards.com) by
[Wizards of the Coast](https://company.wizards.com): your party, their
funeral.
* [TiltedOnline](https://github.com/tiltedphoques/TiltedOnline) by
[Tilted Phoques](https://github.com/tiltedphoques): Skyrim and Fallout 4 mod
to play online.
* [Antkeeper](https://github.com/antkeeper/antkeeper-source): an ant colony
simulation [game](https://antkeeper.com/).
* [Openblack](https://github.com/openblack/openblack): open source
reimplementation of the game _Black & White_ (2001).
* [Land of the Rair](https://github.com/LandOfTheRair/core2): the new backend
of [a retro-style MUD](https://rair.land/) for the new age.
* [Face Smash](https://play.google.com/store/apps/details?id=com.gamee.facesmash):
a game to play with your face.
* [EnTT Pacman](https://github.com/Kerndog73/EnTT-Pacman): an example of how
to make Pacman with `EnTT`.
* [Wacman](https://github.com/carlfindahl/wacman): a pacman clone with OpenGL.
* [Classic Tower Defence](https://github.com/kerndog73/Classic-Tower-Defence):
a tiny little tower defence game featuring a homemade font.
[Check it out](https://indi-kernick.itch.io/classic-tower-defence).
* [The Machine](https://github.com/Kerndog73/The-Machine): a box pushing
puzzler with logic gates and other cool stuff.
[Check it out](https://indi-kernick.itch.io/the-machine-web-version).
* [EnTTPong](https://github.com/DomRe/EnttPong): a basic game made to showcase
different parts of `EnTT` and C++17.
* [Randballs](https://github.com/gale93/randballs): simple `SFML` and `EnTT`
playground.
* [EnTT Tower Defense](https://github.com/Daivuk/tddod): a data oriented tower
defense example.
* [EnTT Breakout](https://github.com/vblanco20-1/entt-breakout): simple
example of a breakout game, using `SDL` and `EnTT`.
* [Arcade puzzle game with EnTT](https://github.com/MasonRG/ArcadePuzzleGame):
arcade puzzle game made in C++ using the `SDL2` and `EnTT` libraries.
* [Snake with EnTT](https://github.com/MasonRG/SnakeGame): simple snake game
made in C++ with the `SDL2` and `EnTT` libraries.
* [Mirrors lasers and robots](https://github.com/guillaume-haerinck/imac-tower-defense):
a small tower defense game based on mirror orientation.
* [PopHead](https://github.com/SPC-Some-Polish-Coders/PopHead/): 2D, Zombie,
RPG game made from scratch in C++.
* [Robotligan](https://github.com/Trisslotten/robotligan): multiplayer
football game.
* [DungeonSlayer](https://github.com/alohaeee/DungeonSlayer): 2D game made
from scratch in C++.
* [3DGame](https://github.com/kwarkGorny/3DGame): 2.5D top-down space shooter.
* [Pulcher](https://github.com/AODQ/pulcher): 2D cross-platform game inspired
by Quake.
* [Destroid](https://github.com/tyrannicaltoucan/destroid): _one-bazillionth_
arcade game about shooting dirty rocks in space, inspired by Asteroids.
* [Wanderer](https://github.com/albin-johansson/wanderer): a 2D exploration
based indie game.
* [Spelunky Classic remake](https://github.com/dbeef/spelunky-psp): a truly
multiplatform experience with a rewrite from scratch.
* [CubbyTower](https://github.com/utilForever/CubbyTower): a simple tower
defense game using C++ with Entity Component System (ECS).
* [Runeterra](https://github.com/utilForever/Runeterra): Legends of Runeterra
simulator using C++ with some reinforcement learning.
* [Black Sun](https://store.steampowered.com/app/1670930/Black_Sun/): fly your
space ship through a large 2D open world.
* [PokeMaster](https://github.com/utilForever/PokeMaster): Pokemon Battle
simulator using C++ with some reinforcement learning.
* [HomeHearth](https://youtu.be/GrEWl8npL9Y): choose your hero, protect the
town, before it's too late.
* [City Builder Game](https://github.com/PhiGei2000/CityBuilderGame): a simple
city-building game using C++ and OpenGL.
* [BattleSub](https://github.com/bfeldpw/battlesub): two player 2D submarine
game with some fluid dynamics.
* [Crimson Rush](https://github.com/WilKam01/LuaCGame): a dungeon-crawler and
roguelike inspired game about exploring and surviving as long as possible.
* [Space Fight](https://github.com/cholushkin/SpaceFight): one screen
multi-player arcade shooter game prototype.
* [Confetti Party](https://github.com/hexerei/entt-confetti): C++ sample
application as a starting point using `EnTT` and `SDL2`.
* Engines and the like:
* [Aether Engine](https://hadean.com/spatial-simulation/)
[v1.1+](https://docs.hadean.com/v1.1/Licenses/) by
[Hadean](https://hadean.com/): a library designed for spatially partitioning
agent-based simulations.
* [Fling Engine](https://github.com/flingengine/FlingEngine): a Vulkan game
engine with a focus on data oriented design.
* [NovusCore](https://github.com/novuscore/NovusCore): a modern take on World
of Warcraft emulation.
* [Chrysalis](https://github.com/ivanhawkes/Chrysalis): action RPG SDK for
CRYENGINE games.
* [LM-Engine](https://github.com/Lawrencemm/LM-Engine): the Vim of game
engines.
* [Edyn](https://github.com/xissburg/edyn): a real-time physics engine
organized as an ECS.
* [MushMachine](https://github.com/MadeOfJelly/MushMachine): engine...
vrooooommm.
* [Antara Gaming SDK](https://github.com/KomodoPlatform/antara-gaming-sdk):
the Komodo Gaming Software Development Kit.
* [XVP](https://ravingbots.com/xvp-expansive-vehicle-physics-for-unreal-engine/):
[_eXpansive Vehicle Physics_](https://github.com/raving-bots/xvp/wiki/Plugin-integration-guide)
plugin for Unreal Engine.
* [Apparently](https://teamwisp.github.io/credits/)
[Wisp](https://teamwisp.github.io/product/) by
[Team Wisp](https://teamwisp.github.io/): an advanced real-time ray tracing
renderer built for the demands of video game artists.
* [shiva](https://github.com/Milerius/shiva): modern C++ engine with
modularity.
* [ImGui/EnTT editor](https://github.com/Green-Sky/imgui_entt_entity_editor):
a drop-in, single-file entity editor for `EnTT` that uses `ImGui` as
graphical backend (with
[demo code](https://github.com/Green-Sky/imgui_entt_entity_editor_demo)).
* [SgOgl](https://github.com/stwe/SgOgl): a game engine library for OpenGL
developed for educational purposes.
* [Lumos](https://github.com/jmorton06/Lumos): game engine written in C++
using OpenGL and Vulkan.
* [Silvanus](https://github.com/hobbyistmaker/silvanus): Silvanus Fusion 360
Box Generator.
* [Lina Engine](https://github.com/inanevin/LinaEngine): an open-source,
modular, tiny and fast C++ game engine, aimed to develop 3D desktop games.
* [Spike](https://github.com/FahimFuad/Spike): a powerful game engine which
can run on a toaster.
* [Helena Framework](https://github.com/NIKEA-SOFT/HelenaFramework): a modern
framework in C++17 for backend development.
* [Unity/EnTT](https://github.com/TongTungGiang/unity-entt): tech demo of a
native simulation layer using `EnTT` and `Unity` as a rendering engine.
* [OverEngine](https://github.com/OverShifted/OverEngine): an over-engineered
game engine.
* [Electro](https://github.com/Electro-Technologies/Electro): high performance
3D game engine with a high emphasis on rendering.
* [Kawaii](https://github.com/Mathieu-Lala/Kawaii_Engine): a modern data
oriented game engine.
* [Becketron](https://github.com/Doctor-Foxling/Becketron): a game engine
written mostly in C++.
* [Spatial Engine](https://github.com/luizgabriel/Spatial.Engine): a
cross-platform engine created on top of google's filament rendering engine.
* [Kaguya](https://github.com/KaiH0717/Kaguya): D3D12 Rendering Engine.
* [OpenAWE](https://github.com/OpenAWE-Project/OpenAWE): open implementation
of the Alan Wake Engine.
* [Nazara Engine](https://github.com/DigitalPulseSoftware/NazaraEngine): fast,
cross-platform, object-oriented API to help in daily developer life.
* [Billy Engine](https://github.com/billy4479/BillyEngine): some kind of a 2D
engine based on `SDL2` and `EnTT`.
* [Ducktape](https://github.com/DucktapeEngine/Ducktape): an open source C++
2D & 3D game engine that focuses on being fast and powerful.
* Articles, videos and blog posts:
* [Some posts](https://skypjack.github.io/tags/#entt) on my personal
[blog](https://skypjack.github.io/) are about `EnTT`, for those who want to
know **more** on this project.
* [Game Engine series](https://www.youtube.com/c/TheChernoProject/videos) by
[The Cherno](https://github.com/TheCherno) (not only about `EnTT` but also
on the use of an ECS in general):
- [Intro to EnTT](https://www.youtube.com/watch?v=D4hz0wEB978).
- [Entities and Components](https://www.youtube.com/watch?v=-B1iu4QJTUc).
- [The ENTITY Class](https://www.youtube.com/watch?v=GfSzeAcsBb0).
- [Camera Systems](https://www.youtube.com/watch?v=ubZn7BlrnTU).
- [Scene Camera](https://www.youtube.com/watch?v=UKVFRRufKzo).
- [Native Scripting](https://www.youtube.com/watch?v=iIUhg88MK5M).
- [Native Scripting (now with virtual functions!)](https://www.youtube.com/watch?v=1cHEcrIn8IQ).
- [Scene Hierarchy Panel](https://www.youtube.com/watch?v=wziDnE8guvI).
- [Properties Panel](https://www.youtube.com/watch?v=NBpB0qscF3E).
- [Camera Component UI](https://www.youtube.com/watch?v=RIMt_6agUiU).
- [Drawing Component UI](https://www.youtube.com/watch?v=u3yq8s3KuSE).
- [Transform Component UI](https://www.youtube.com/watch?v=8JqcXYbzPJc).
- [Adding/Removing Entities and Components UI](https://www.youtube.com/watch?v=PsyGmsIgp9M).
- [Saving and Loading Scenes](https://www.youtube.com/watch?v=IEiOP7Y-Mbc).
- ... And so on.
[Check out](https://www.youtube.com/channel/UCQ-W1KE9EYfdxhL6S4twUNw) the
_Game Engine Series_ by The Cherno for more videos.
* [Space Battle: Huge edition](http://victor.madtriangles.com/code%20experiment/2018/06/11/post-ecs-battle-huge.html):
huge space battle built entirely from scratch.
* [Space Battle](https://github.com/vblanco20-1/ECS_SpaceBattle): huge space
battle built on `UE4`.
* [Experimenting with ECS in UE4](http://victor.madtriangles.com/code%20experiment/2018/03/25/post-ue4-ecs-battle.html):
interesting article about `UE4` and `EnTT`.
* [Implementing ECS architecture in UE4](https://forums.unrealengine.com/development-discussion/c-gameplay-programming/1449913-implementing-ecs-architecture-in-ue4-giant-space-battle):
giant space battle.
* [Conan Adventures (SFML and EnTT in C++)](https://leinnan.github.io/blog/conan-adventuressfml-and-entt-in-c.html):
create projects in modern C++ using `SFML`, `EnTT`, `Conan` and `CMake`.
* [Adding EnTT ECS to Chrysalis](https://www.tauradius.com/post/adding-an-ecs-to-chrysalis/):
a blog entry (and its
[follow-up](https://www.tauradius.com/post/chrysalis-update-2020-08-02/))
about the integration of `EnTT` into `Chrysalis`, an action RPG SDK for
CRYENGINE games.
* [Creating Minecraft in One Week with C++ and Vulkan](https://vazgriz.com/189/creating-minecraft-in-one-week-with-c-and-vulkan/):
a crack at recreating Minecraft in one week using a custom C++ engine and
Vulkan ([code included](https://github.com/vazgriz/VoxelGame)).
* [Ability Creator](https://www.erichildebrand.net/blog/ability-creator-project-retrospect):
project retrospect by [Eric Hildebrand](https://www.erichildebrand.net/).
* [EnTT Entity Component System Gaming Library](https://gamefromscratch.com/entt-entity-component-system-gaming-library/):
`EnTT` on GameFromScratch.com.
* [Custom C++ server for UE5](https://youtu.be/fbXZVNCOvjM) optimized for
MMO(RPG)s and its [follow-up](https://youtu.be/yGlZeopx2hU) episode about
player bots and full external ECS: a series definitely worth looking at.
* Any Other Business:
* [ArcGIS Runtime SDKs](https://developers.arcgis.com/arcgis-runtime/) by
[Esri](https://www.esri.com/): they use `EnTT` for the internal ECS and the
cross platform C++ rendering engine. The SDKs are utilized by a lot of
enterprise custom apps, as well as by Esri for its own public applications
such as
[Explorer](https://play.google.com/store/apps/details?id=com.esri.explorer),
[Collector](https://play.google.com/store/apps/details?id=com.esri.arcgis.collector)
and
[Navigator](https://play.google.com/store/apps/details?id=com.esri.navigator).
* [FASTSUITE Edition 2](https://www.fastsuite.com/en_EN/fastsuite/fastsuite-edition-2.html)
by [Cenit](http://www.cenit.com/en_EN/about-us/overview.html): they use
`EnTT` to drive their simulation, that is, the communication between robot
controller emulator and renderer.
* [Ragdoll](https://ragdolldynamics.com/): real-time physics for Autodesk Maya
2020.
* [Project Lagrange](https://github.com/adobe/lagrange): a robust geometry
processing library by [Adobe](https://github.com/adobe).
* [AtomicDEX](https://github.com/KomodoPlatform/atomicDEX-Desktop): a secure
wallet and non-custodial decentralized exchange rolled into one application.
* [Apparently](https://www.linkedin.com/in/skypjack/)
[NIO](https://www.nio.io/): there was a collaboration to make some changes
to `EnTT`, at the time used for internal projects.
* [Apparently](https://www.linkedin.com/jobs/view/architekt-c%2B%2B-at-tieto-1219512333/)
[Tieto](https://www.tieto.com/): they published a job post where `EnTT` was
listed on their software stack.
* [Sequentity](https://github.com/alanjfs/sequentity): A MIDI-like
sequencer/tracker for C++ and `ImGui` (with `Magnum` and `EnTT`).
* [EnTT meets Sol2](https://github.com/skaarj1989/entt-meets-sol2): freely
available examples of how to combine `EnTT` and `Sol2`.
* [Godot meets EnTT](https://github.com/portaloffreedom/godot_entt_example/):
a simple example on how to use `EnTT` within
[`Godot`](https://godotengine.org/).
* [Godot and GameNetworkingSockets meet EnTT](https://github.com/portaloffreedom/godot_entt_net_example):
a simple example on how to use `EnTT` and
[`GameNetworkingSockets`](https://github.com/ValveSoftware/GameNetworkingSockets)
within [`Godot`](https://godotengine.org/).
* [MatchOneEntt](https://github.com/mhaemmerle/MatchOneEntt): port of
[Match One](https://github.com/sschmid/Match-One) for `Entitas-CSharp`.
* GitHub contains also
[many other examples](https://github.com/search?o=desc&q=%22skypjack%2Fentt%22&s=indexed&type=Code)
of use of `EnTT` from which to take inspiration if interested.
If you know of other resources out there that are about `EnTT`, feel free to
open an issue or a PR and I'll be glad to add them to this page.

88
external/entt/entt/docs/md/locator.md vendored Normal file

@ -0,0 +1,88 @@
# Crash Course: service locator
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [Service locator](#service-locator)
* [Opaque handles](#opaque-handles)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
Usually, service locators are tightly bound to the services they expose and it's
hard to define a general purpose solution.<br/>
This tiny class tries to fill the gap and gets rid of the burden of defining a
different specific locator for each application.
# Service locator
The service locator API tries to mimic that of `std::optional` and adds some
extra functionalities on top of it such as allocator support.<br/>
There are a couple of functions to set up a service, namely `emplace` and
`allocate_emplace`:
```cpp
entt::locator<interface>::emplace<service>(argument);
entt::locator<interface>::allocate_emplace<service>(allocator, argument);
```
The difference is that the latter expects an allocator as the first argument and
uses it to allocate the service itself.<br/>
Once a service is set up, it's retrieved using the `value` function:
```cpp
interface &service = entt::locator<interface>::value();
```
Since the service may not be set (and therefore this function may result in
undefined behavior), the `has_value` and `value_or` functions are also available
to test a service locator and to get a fallback service in case there is none:
```cpp
if(entt::locator<interface>::has_value()) {
    // ...
}

interface &service = entt::locator<interface>::value_or<fallback_impl>(argument);
```
All arguments are used only if necessary, that is, if a service doesn't already
exist and therefore the fallback service is constructed and returned. In all
other cases, they are discarded.<br/>
Finally, to reset a service, use the `reset` function.
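A small self-contained sketch of the calls above (the `audio_interface` hierarchy is purely illustrative):
```cpp
#include <entt/entt.hpp>

struct audio_interface {
    virtual ~audio_interface() = default;
    virtual void play() = 0;
};

struct sdl_audio: audio_interface {
    void play() override { /* ... */ }
};

struct null_audio: audio_interface {
    void play() override {}
};

int main() {
    entt::locator<audio_interface>::emplace<sdl_audio>();
    entt::locator<audio_interface>::value().play();

    // returns the existing service here; constructs the fallback only if none was set
    entt::locator<audio_interface>::value_or<null_audio>().play();

    entt::locator<audio_interface>::reset();
}
```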
## Opaque handles
Sometimes it's useful to _transfer_ a copy of a service to another locator. For
example, when working across boundaries it's common to _share_ a service with a
dynamically loaded module.<br/>
There aren't many options in this case. Among them is the possibility of
_exporting_ services and assigning them to a different locator.
This is what the `handle` and `reset` functions are meant for.<br/>
The former returns an opaque object useful for _exporting_ (or rather, obtaining
a reference to) a service. The latter also accepts an optional handle argument,
which allows users to reset a service by initializing it from an opaque handle:
```cpp
auto handle = entt::locator<interface>::handle();
entt::locator<interface>::reset(handle);
```
It's worth noting that it's possible to get handles for uninitialized services
and use them with other locators. Of course, all a user gets in that case is an
uninitialized service elsewhere as well.
Note that exporting a service allows users to _share_ the object currently set
in a locator. Replacing it afterwards won't affect locators that were configured
with a handle to the previous item.<br/>
In other words, if an audio service is replaced with a null object to silence an
application and the original service was shared, this operation won't propagate
to the other locators. Therefore, a module that shares ownership of the original
audio service is still able to emit sounds.

1023
external/entt/entt/docs/md/meta.md vendored Normal file

File diff suppressed because it is too large

359
external/entt/entt/docs/md/poly.md vendored Normal file

@ -0,0 +1,359 @@
# Crash Course: poly
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [Other libraries](#other-libraries)
* [Concept and implementation](#concept-and-implementation)
* [Deduced interface](#deduced-interface)
* [Defined interface](#defined-interface)
* [Fulfill a concept](#fulfill-a-concept)
* [Inheritance](#inheritance)
* [Static polymorphism in the wild](#static-polymorphism-in-the-wild)
* [Storage size and alignment requirement](#storage-size-and-alignment-requirement)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
Static polymorphism is a very powerful tool in C++, albeit sometimes cumbersome
to obtain.<br/>
This module aims to make it simple and easy to use.
The library allows users to define _concepts_ as interfaces to fulfill with concrete
classes without having to inherit from a common base.<br/>
This is, among others, one of the advantages of static polymorphism in general
and of a generic wrapper like that offered by the `poly` class template in
particular.<br/>
What users get is an object that can be passed around as such and not through a
reference or a pointer, as happens when it comes to working with dynamic
polymorphism.
Since the `poly` class template makes use of `entt::any` internally, it also
supports most of its features. Among the most important is the possibility to
create aliases to existing and thus unmanaged objects. This allows users to
exploit the static polymorphism while maintaining ownership of objects.<br/>
Likewise, the `poly` class template also benefits from the small buffer
optimization offered by the `entt::any` class and therefore minimizes the number
of allocations, avoiding them altogether where possible.
## Other libraries
There are some very interesting libraries regarding static polymorphism.<br/>
Among all, the two that I prefer are:
* [`dyno`](https://github.com/ldionne/dyno): runtime polymorphism done right.
* [`Poly`](https://github.com/facebook/folly/blob/master/folly/docs/Poly.md):
a class template that makes it easy to define a type-erasing polymorphic
object wrapper.
The former is admittedly an experimental library, with many interesting ideas.
I have some doubts about the usefulness of some features in real-world projects,
but perhaps my lack of experience comes into play here. In my opinion, its only
flaw is the API which I find slightly more cumbersome than other solutions.<br/>
The latter was undoubtedly a source of inspiration for this module, although I
opted for different choices in the implementation of both the final API and some
features.
Either way, the authors are gurus of the C++ community, people I only have to
learn from.
# Concept and implementation
The first thing to do to create a _type-erasing polymorphic object wrapper_ (to
use the terminology introduced by Eric Niebler) is to define a _concept_ that
types will have to adhere to.<br/>
For this purpose, the library offers a single class that supports both deduced
and fully defined interfaces. Although having interfaces deduced automatically
is convenient and allows users to write less code in most cases, this has some
limitations and it's therefore useful to be able to get around the deduction by
providing a custom definition for the static virtual table.
Once the interface is defined, it will be sufficient to provide a generic
implementation to fulfill the concept.<br/>
Also in this case, the library allows customizations based on types or families
of types, so as to be able to go beyond the generic case where necessary.
## Deduced interface
This is how a concept with a deduced interface is introduced:
```cpp
struct Drawable: entt::type_list<> {
    template<typename Base>
    struct type: Base {
        void draw() { this->template invoke<0>(*this); }
    };

    // ...
};
```
It's recognizable by the fact that it inherits from an empty type list.<br/>
Functions can also be const, accept any number of parameters and return a type
other than `void`:
```cpp
struct Drawable: entt::type_list<> {
    template<typename Base>
    struct type: Base {
        bool draw(int pt) const { return this->template invoke<0>(*this, pt); }
    };

    // ...
};
```
In this case, all parameters must be passed to `invoke` after the reference to
`this` and the return value is whatever the internal call returns.<br/>
As for `invoke`, this is a name that is injected into the _concept_ through
`Base`, from which one must necessarily inherit. Since it's also a dependent
name, the `this->template` form is unfortunately necessary due to the rules of
the language. However, there is also an alternative that goes through an
external call:
```cpp
struct Drawable: entt::type_list<> {
    template<typename Base>
    struct type: Base {
        void draw() const { entt::poly_call<0>(*this); }
    };

    // ...
};
```
Once the _concept_ is defined, users must provide a generic implementation of it
in order to tell the system how any type can satisfy its requirements. This is
done via an alias template within the concept itself.<br/>
The index passed as a template parameter to either `invoke` or `poly_call`
refers to how this alias is defined.
## Defined interface
A fully defined concept is no different to one for which the interface is
deduced, with the only difference that the list of types is not empty this time:
```cpp
struct Drawable: entt::type_list<void()> {
    template<typename Base>
    struct type: Base {
        void draw() { entt::poly_call<0>(*this); }
    };

    // ...
};
```
Again, parameters and return values other than `void` are allowed. Also, the
function type must be const when the method to bind to it is const:
```cpp
struct Drawable: entt::type_list<bool(int) const> {
    template<typename Base>
    struct type: Base {
        bool draw(int pt) const { return entt::poly_call<0>(*this, pt); }
    };

    // ...
};
```
Why should a user fully define a concept if the function types are the same as
the deduced ones?<br/>
Because, in fact, this is exactly the limitation that can be worked around by
manually defining the static virtual table.
When things are deduced, there is an implicit constraint.<br/>
If the concept exposes a member function called `draw` with function type
`void()`, the concept can be satisfied:
* Either by a class that exposes a member function with the same name and the
same signature.
* Or through a lambda that makes use of existing member functions from the
interface itself.
In other words, it's not possible to make use of functions not belonging to the
interface, even if they are present in the types that fulfill the concept.<br/>
Similarly, it's not possible to deduce a function in the static virtual table
with a function type different from that of the associated member function in
the interface itself.
Explicitly defining a static virtual table suppresses the deduction step and
allows maximum flexibility when providing the implementation for a concept.
## Fulfill a concept
The `impl` alias template of a concept is used to define how it's fulfilled:
```cpp
struct Drawable: entt::type_list<> {
    // ...

    template<typename Type>
    using impl = entt::value_list<&Type::draw>;
};
```
In this case, it's stated that the `draw` method of a generic type will be
enough to satisfy the requirements of the `Drawable` concept.<br/>
Both member functions and free functions are supported to fulfill concepts:
```cpp
template<typename Type>
void print(Type &self) { self.print(); }

struct Drawable: entt::type_list<void()> {
    // ...

    template<typename Type>
    using impl = entt::value_list<&print<Type>>;
};
```
Likewise, as long as the parameter types and return type support conversions to
and from those of the function type referenced in the static virtual table, the
actual implementation may differ in its function type since it's erased
internally.<br/>
Moreover, the `self` parameter isn't strictly required by the system and can be
left out for free functions if not required.
Refer to the inline documentation for more details.
# Inheritance
_Concept inheritance_ is straightforward due to how `poly` is designed in `EnTT`.
Therefore, it's quite easy to build hierarchies of concepts if necessary.<br/>
The only constraint is that all concepts in a hierarchy must belong to the same
_family_, that is, they must be either all deduced or all defined.
For a deduced concept, inheritance is achieved in a few steps:
```cpp
struct DrawableAndErasable: entt::type_list<> {
    template<typename Base>
    struct type: typename Drawable::template type<Base> {
        static constexpr auto base = std::tuple_size_v<typename entt::poly_vtable<Drawable>::type>;
        void erase() { entt::poly_call<base + 0>(*this); }
    };

    template<typename Type>
    using impl = entt::value_list_cat_t<
        typename Drawable::impl<Type>,
        entt::value_list<&Type::erase>
    >;
};
```
The static virtual table is empty and must remain so.<br/>
On the other hand, `type` no longer inherits from `Base` and instead forwards
its template parameter to the type exposed by the _base class_. Internally, the
size of the static virtual table of the base class is used as an offset for the
local indexes.<br/>
Finally, by means of the `value_list_cat_t` utility, the implementation consists
in appending the new functions to the previous list.
As for a defined concept, the list of types must also be extended, in a similar
way to what is shown for the implementation of the above concept.<br/>
To do this, it's useful to declare a function that allows to convert a _concept_
into its underlying `type_list` object:
```cpp
template<typename... Type>
entt::type_list<Type...> as_type_list(const entt::type_list<Type...> &);
```
The definition isn't strictly required, since the function will only be used
through a `decltype`, as follows:
```cpp
struct DrawableAndErasable: entt::type_list_cat_t<
    decltype(as_type_list(std::declval<Drawable>())),
    entt::type_list<void()>
> {
    // ...
};
```
Similar to above, `type_list_cat_t` is used to concatenate the underlying static
virtual table with the new function types.<br/>
Everything else is the same as already shown.
# Static polymorphism in the wild
Once the _concept_ and implementation have been introduced, it will be possible
to use the `poly` class template to contain instances that meet the
requirements:
```cpp
using drawable = entt::poly<Drawable>;

struct circle {
    void draw() { /* ... */ }
};

struct square {
    void draw() { /* ... */ }
};

// ...

drawable instance{circle{}};
instance->draw();
instance = square{};
instance->draw();
```
The `poly` class template offers a wide range of constructors, from the default
one (which will return an uninitialized `poly` object) to the copy and move
constructors, as well as the ability to create objects in-place.<br/>
Among others, there is also a constructor that allows users to wrap unmanaged
objects in a `poly` instance (either const or non-const ones):
```cpp
circle shape;
drawable instance{std::in_place_type<circle &>, shape};
```
Similarly, it's possible to create non-owning copies of `poly` from an existing
object:
```cpp
drawable other = instance.as_ref();
```
In both cases, although the interface of the `poly` object doesn't change, it
won't construct any element or take care of destroying the referenced objects.
Note also how the underlying concept is accessed via a call to `operator->` and
not directly as `instance.draw()`.<br/>
This allows users to decouple the API of the wrapper from that of the concept.
Therefore, where `instance.data()` will invoke the `data` member function of the
poly object, `instance->data()` will map directly to the functionality exposed
by the underlying concept.
# Storage size and alignment requirement
Under the hood, the `poly` class template makes use of `entt::any`. Therefore,
it can take advantage of the possibility of defining at compile-time the size of
the storage suitable for the small buffer optimization as well as the alignment
requirements:
```cpp
entt::basic_poly<Drawable, sizeof(double[4]), alignof(double[4])>
```
The default size is `sizeof(double[2])`, which seems like a good compromise
between a buffer that is too large and one unable to hold anything larger than
an integer. The alignment requirement is optional instead and by default such
that it's the most stringent (the largest) for any object whose size is at most
equal to the one provided.<br/>
It's worth noting that providing a size of 0 (which is an accepted value in all
respects) will force the system to dynamically allocate the contained objects in
all cases.

212
external/entt/entt/docs/md/process.md vendored Normal file
View File

@ -0,0 +1,212 @@
# Crash Course: cooperative scheduler
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [The process](#the-process)
* [Adaptor](#adaptor)
* [The scheduler](#the-scheduler)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
Sometimes processes are a useful tool to work around the strict definition of a
system and introduce logic in a different way, usually without resorting to the
introduction of other components.
`EnTT` offers minimal support for this paradigm by introducing a few classes
that users can use to define and execute cooperative processes.
# The process
A typical process must inherit from the `process` class template, which stays
true to the CRTP idiom. Moreover, derived classes must specify the intended type
for elapsed times.
A process should publicly expose the following member functions where needed
(note that it isn't required to define a function unless the derived class wants
to _override_ the default behavior):
* `void update(Delta, void *);`
It's invoked once per tick until a process is explicitly aborted or it
terminates either with or without errors. Even though it's not mandatory to
declare this member function, as a rule of thumb each process should at
least define it to work properly. The `void *` parameter is an opaque pointer
to user data (if any) forwarded directly to the process during an update.
* `void init();`
It's invoked when the process joins the running queue of a scheduler. This
happens as soon as it's attached to the scheduler if the process is a top
level one, otherwise when it replaces its parent if the process is a
continuation.
* `void succeeded();`
It's invoked in case of success, immediately after an update and during the
same tick.
* `void failed();`
It's invoked in case of errors, immediately after an update and during the
same tick.
* `void aborted();`
It's invoked only if a process is explicitly aborted. There is no guarantee
that it executes in the same tick, this depends solely on whether the
process is aborted immediately or not.
Derived classes can also change the internal state of a process by invoking
`succeed` and `fail`, as well as `pause` and `unpause` the process itself. All of
these are protected member functions, made available so that the life cycle of a
process can be managed from within a derived class.
Here is a minimal example for the sake of curiosity:
```cpp
struct my_process: entt::process<my_process, std::uint32_t> {
    using delta_type = std::uint32_t;

    my_process(delta_type delay)
        : remaining{delay}
    {}

    void update(delta_type delta, void *) {
        remaining -= std::min(remaining, delta);

        // ...

        if(!remaining) {
            succeed();
        }
    }

private:
    delta_type remaining;
};
```
## Adaptor
Lambdas and functors can't be used directly with a scheduler for they are not
properly defined processes with managed life cycles.<br/>
This class helps in filling the gap and turning lambdas and functors into
full-featured processes usable by a scheduler.
The function call operator has a signature similar to the one of the `update`
function of a process but for the fact that it receives two extra arguments to
call whenever a process is terminated with success or with an error:
```cpp
void(Delta delta, void *data, auto succeed, auto fail);
```
Parameters have the following meaning:
* `delta` is the elapsed time.
* `data` is an opaque pointer to user data if any, `nullptr` otherwise.
* `succeed` is a function to call when a process terminates with success.
* `fail` is a function to call when a process terminates with errors.
Both `succeed` and `fail` accept no parameters at all.
Note that usually users shouldn't worry about creating adaptors at all. A
scheduler creates them internally each and every time a lambda or a functor is
used as a process.
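As a reference, here is a minimal sketch of a callable with the expected signature (the lambda name is made up for the example; as shown in the next section, a scheduler wraps it in an adaptor automatically when attached):
```cpp
// terminates successfully after a single tick, ignoring user data and errors
auto one_shot = [](auto delta, void *, auto succeed, auto) {
    // ...
    succeed();
};
```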
# The scheduler
A cooperative scheduler runs different processes and helps managing their life
cycles.
Each process is invoked once per tick. If it terminates, it's removed
automatically from the scheduler and never invoked again. Otherwise, it's a good
candidate to run once more during the next tick.<br/>
A process can also have a child. In this case, the parent process is replaced
with its child when it terminates, and only if it terminates with success. In
case of errors, both the parent process and its child are discarded. This way,
it's easy to create chains of processes to run sequentially.
Using a scheduler is straightforward. To create it, users must provide only the
type for the elapsed times and no arguments at all:
```cpp
entt::scheduler<std::uint32_t> scheduler;
```
It has member functions to query its internal data structures, like `empty` or
`size`, as well as a `clear` utility to reset it to a clean state:
```cpp
// checks if there are processes still running
const auto empty = scheduler.empty();
// gets the number of processes still running
entt::scheduler<std::uint32_t>::size_type size = scheduler.size();
// resets the scheduler to its initial state and discards all the processes
scheduler.clear();
```
To attach a process to a scheduler there are mainly two ways:
* If the process inherits from the `process` class template, it's enough to
indicate its type and submit all the parameters required to construct it to
the `attach` member function:
```cpp
scheduler.attach<my_process>(1000u);
```
* Otherwise, in case of a lambda or a functor, it's enough to provide an
instance of the class to the `attach` member function:
```cpp
scheduler.attach([](auto...){ /* ... */ });
```
In both cases, the return value is an opaque object that offers a `then` member
function to use to create chains of processes to run sequentially.<br/>
As a minimal example of use:
```cpp
// schedules a task in the form of a lambda function
scheduler.attach([](auto delta, void *, auto succeed, auto fail) {
// ...
})
// appends a child in the form of another lambda function
.then([](auto delta, void *, auto succeed, auto fail) {
// ...
})
// appends a child in the form of a process class
.then<my_process>(1000u);
```
To update a scheduler and therefore all its processes, the `update` member
function is the way to go:
```cpp
// updates all the processes, no user data are provided
scheduler.update(delta);
// updates all the processes and provides them with custom data
scheduler.update(delta, &data);
```
In addition to these functions, the scheduler offers an `abort` member function
that can be used to discard all the running processes at once:
```cpp
// aborts all the processes abruptly ...
scheduler.abort(true);
// ... or gracefully during the next tick
scheduler.abort();
```
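Putting it all together, a minimal sketch of a loop that drives the scheduler until no processes are left (the fixed `delta` below is an assumption, a real application would measure the elapsed time):
```cpp
entt::scheduler<std::uint32_t> scheduler;
scheduler.attach<my_process>(1000u);

while(!scheduler.empty()) {
    // a fixed time step, purely for the sake of the example
    const std::uint32_t delta = 16u;
    scheduler.update(delta);
}
```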

75
external/entt/entt/docs/md/reference.md vendored Normal file
View File

@ -0,0 +1,75 @@
# Similar projects
There are many projects similar to `EnTT`, both open source and not.<br/>
Some even borrowed some ideas from this library and expressed them in different
languages.<br/>
Others developed different architectures from scratch and therefore offer
alternative solutions with their pros and cons.
Below is an incomplete list of those I've come across so far.<br/>
If some terms or designs aren't clear, I recommend referring to the
[_ECS Back and Forth_](https://skypjack.github.io/tags/#ecs) series for all the
details.
I hope this list can grow much more in the future:
* C:
* [destral_ecs](https://github.com/roig/destral_ecs): a single-file ECS based
on sparse sets.
* [Diana](https://github.com/discoloda/Diana): an ECS that uses sparse sets to
keep track of entities in systems.
* [Flecs](https://github.com/SanderMertens/flecs): a multithreaded archetype
ECS based on semi-contiguous arrays rather than chunks.
* [lent](https://github.com/nem0/lent): the Donald Trump of the ECS libraries.
* C++:
* [decs](https://github.com/vblanco20-1/decs): a chunk based archetype ECS.
* [ecst](https://github.com/SuperV1234/ecst): a multithreaded compile-time
ECS that uses sparse sets to keep track of entities in systems.
* [EntityX](https://github.com/alecthomas/entityx): a bitset based ECS that
uses a single large matrix of components indexed with entities.
* [Gaia-ECS](https://github.com/richardbiely/gaia-ecs): a chunk based
archetype ECS.
* [Polypropylene](https://github.com/pmbittner/Polypropylene): a hybrid
solution between an ECS and dynamic mixins.
* C#
* [Entitas](https://github.com/sschmid/Entitas-CSharp): the ECS framework for
C# and Unity, where _reactive systems_ were invented.
* [LeoECS](https://github.com/Leopotam/ecs): simple lightweight C# Entity
Component System framework.
* [Svelto.ECS](https://github.com/sebas77/Svelto.ECS): a very interesting
platform agnostic and table based ECS framework.
* Go:
* [gecs](https://github.com/tutumagi/gecs): a sparse sets based ECS inspired
by `EnTT`.
* Javascript:
* [\@javelin/ecs](https://github.com/3mcd/javelin/tree/master/packages/ecs):
an archetype ECS in TypeScript.
* [ecsy](https://github.com/MozillaReality/ecsy): I haven't had the time to
investigate the underlying design of `ecsy` but it looks cool anyway.
* Perl:
* [Game::Entities](https://gitlab.com/jjatria/perl-game-entities): a simple
entity registry for ECS designs inspired by `EnTT`.
* Raku:
* [Game::Entities](https://gitlab.com/jjatria/raku-game-entities): a simple
entity registry for ECS designs inspired by `EnTT`.
* Rust:
* [Legion](https://github.com/TomGillen/legion): a chunk based archetype ECS.
* [Shipyard](https://github.com/leudz/shipyard): it borrows some ideas from
`EnTT` and offers a sparse sets based ECS with grouping functionalities.
* [Sparsey](https://github.com/LechintanTudor/sparsey): sparse set based ECS
written in Rust.
* [Specs](https://github.com/amethyst/specs): a parallel ECS based mainly on
hierarchical bitsets that allows different types of storage as needed.
* Zig
* [zig-ecs](https://github.com/prime31/zig-ecs): a _zig-ification_ of `EnTT`.
If you know of other resources out there that can be of interest for the reader,
feel free to open an issue or a PR and I'll be glad to add them to this page.

191
external/entt/entt/docs/md/resource.md vendored Normal file
View File

@ -0,0 +1,191 @@
# Crash Course: resource management
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [The resource, the loader and the cache](#the-resource-the-loader-and-the-cache)
* [Resource handle](#resource-handle)
* [Loaders](#loaders)
* [The cache class](#the-cache-class)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
Resource management is usually one of the most critical parts of a game.
Solutions are often tuned to the particular application. There exist several
approaches and all of them are perfectly fine as long as they fit the
requirements of the piece of software in which they are used.<br/>
Examples are loading everything on start, loading on request, predictive
loading, and so on.
`EnTT` doesn't pretend to offer a _one-fits-all_ solution for the different
cases.<br/>
Instead, the library comes with a minimal, general purpose resource cache that
might be useful in many cases.
# The resource, the loader and the cache
Resource, loader and cache are the three main actors for the purpose.<br/>
The _resource_ is an image, an audio, a video or any other type:
```cpp
struct my_resource { const int value; };
```
The _loader_ is a callable type the aim of which is to load a specific resource:
```cpp
struct my_loader final {
    using result_type = std::shared_ptr<my_resource>;

    result_type operator()(int value) const {
        // ...
        return std::make_shared<my_resource>(value);
    }
};
```
Its function operator can accept any arguments and should return a value of the
declared result type (`std::shared_ptr<my_resource>` in the example).<br/>
A loader can also overload its function call operator to make it possible to
construct the same or another resource from different lists of arguments.
Finally, a cache is a specialization of a class template tailored to a specific
resource and (optionally) a loader:
```cpp
using my_cache = entt::resource_cache<my_resource, my_loader>;
// ...
my_cache cache{};
```
The class is designed to create different caches for different resource types
and to manage each one independently in the most appropriate way.<br/>
As a (very) trivial example, audio tracks can survive in most of the scenes of
an application while meshes can be associated with a single scene only, then
discarded when a player leaves it.
## Resource handle
Resources aren't returned directly to the caller. Instead, they are wrapped in a
_resource handle_, an instance of the `entt::resource` class template.<br/>
For those who know the _flyweight design pattern_ already, that's exactly what
it is. To all others, this is the time to brush up on some notions instead.
A shared pointer could have been used as a resource handle. In fact, the default
implementation mostly maps the interface of its standard counterpart and only
adds a few things on top of it.<br/>
However, the handle in `EnTT` is designed as a standalone class template. This
is due to the fact that specializing a class in the standard library is often
undefined behavior while having the ability to specialize the handle for one,
more or all resource types could help over time.
## Loaders
A loader is responsible for _loading_ resources (quite obviously).<br/>
By default, it's just a callable object that forwards its arguments to the
resource itself. That is, a _passthrough type_. All the work is delegated to the
constructor(s) of the resource itself.<br/>
Loaders are also fully customizable, as expected.
A custom loader is a class with at least one function call operator and a member
type named `result_type`.<br/>
The loader isn't required to return a resource handle. As long as `result_type`
is suitable for constructing a handle, that's fine.
When using the default handle, it expects a resource type which is convertible
to or suitable for constructing an `std::shared_ptr<Type>` (where `Type` is the
actual resource type).<br/>
In other terms, the loader should return shared pointers to the given resource
type. However, this isn't mandatory. Users can easily get around this constraint
by specializing both the handle and the loader.
A cache forwards all its arguments to the loader if required. This means that
loaders can also support tag dispatching to offer different loading policies:
```cpp
struct my_loader {
    using result_type = std::shared_ptr<my_resource>;

    struct from_disk_tag{};
    struct from_network_tag{};

    template<typename... Args>
    result_type operator()(from_disk_tag, Args&&... args) {
        // ...
        return std::make_shared<my_resource>(std::forward<Args>(args)...);
    }

    template<typename... Args>
    result_type operator()(from_network_tag, Args&&... args) {
        // ...
        return std::make_shared<my_resource>(std::forward<Args>(args)...);
    }
};
```
This makes the whole loading logic quite flexible and easy to extend over time.
## The cache class
The cache is the class that is asked to _connect the dots_.<br/>
It loads the resources, stores them aside and returns handles as needed:
```cpp
entt::resource_cache<my_resource, my_loader> cache{};
```
Under the hood, a cache is nothing more than a map where the key value has type
`entt::id_type` while the mapped value is whatever type its loader returns.<br/>
For this reason, it offers most of the functionalities a user would expect from
a map, such as `empty` or `size` and so on. Similarly, it's an iterable type
that also supports indexing by resource id:
```cpp
for(auto [id, res]: cache) {
    // ...
}

if(entt::resource<my_resource> res = cache["resource/id"_hs]; res) {
    // ...
}
```
Please, refer to the inline documentation for all the details about the other
functions (such as `contains` or `erase`).
Setting aside the part of the API that this class _shares_ with a map, it also
adds a few things on top of it in order to address the most common requirements
of a resource cache.<br/>
In particular, it doesn't have an `emplace` member function. This is replaced by
`load` and `force_load` (where the former loads a new resource only if it's not
already present, while the latter triggers a forced loading in any case):
```cpp
auto ret = cache.load("resource/id"_hs);
// true only if the resource was not already present
const bool loaded = ret.second;
// takes the resource handle pointed to by the returned iterator
entt::resource<my_resource> res = ret.first->second;
```
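As a complementary sketch, `force_load` follows the same pattern but always invokes the loader, even when the identifier is already present in the cache (the argument below is made up for the example and is forwarded to the loader as-is):
```cpp
// triggers a forced loading regardless of what the cache already contains
auto forced = cache.force_load("resource/id"_hs, 42);
entt::resource<my_resource> refreshed = forced.first->second;
```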
Note that the hashed string is used for convenience in the example above.<br/>
Resource identifiers are nothing more than integral values. Therefore, plain
numbers as well as non-class enum values are accepted.
It's worth mentioning that the iterators of a cache as well as its indexing
operators return resource handles rather than instances of the mapped type.<br/>
Since the cache has no control over the loader and a resource isn't required to
also be convertible to bool, these handles can be invalid. This usually means an
error in the user logic but it may also be an _expected_ event.<br/>
It's therefore recommended to verify handle validity with a check in debug
builds (for example, when loading) or with appropriate logic in retail builds.
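A minimal sketch of such a check, using a plain `assert` for the debug-only part (any other error handling strategy works equally well):
```cpp
#include <cassert>

entt::resource<my_resource> res = cache["resource/id"_hs];

// in debug, an invalid handle usually points at a bug in the loading logic
assert(res);

if(res) {
    // it's now safe to access the underlying resource
    const int value = res->value;
    // ...
}
```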

549
external/entt/entt/docs/md/signal.md vendored Normal file
View File

@ -0,0 +1,549 @@
# Crash Course: events, signals and everything in between
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Introduction](#introduction)
* [Delegate](#delegate)
* [Runtime arguments](#runtime-arguments)
* [Lambda support](#lambda-support)
* [Signals](#signals)
* [Event dispatcher](#event-dispatcher)
* [Named queues](#named-queues)
* [Event emitter](#event-emitter)
<!--
@endcond TURN_OFF_DOXYGEN
-->
# Introduction
Signals are more often than not a core part of games and software architectures
in general.<br/>
They help to decouple the various parts of a system while allowing them to
communicate with each other somehow.
The so-called _modern C++_ comes with a tool that can be useful in this regard,
the `std::function`. As an example, it can be used to create delegates.<br/>
However, there is no guarantee that an `std::function` doesn't perform
allocations under the hood and this could be problematic sometimes. Furthermore,
it solves a problem but may not adapt well to other requirements that may arise
from time to time.
In case the flexibility and power of an `std::function` aren't required or
if the price to pay for them is too high, `EnTT` offers a complete set of
lightweight classes to solve the same and many other problems.
# Delegate
A delegate can be used as a general purpose invoker with no memory overhead for
free functions and member functions provided along with an instance on which to
invoke them.<br/>
It doesn't claim to be a drop-in replacement for an `std::function`, so don't
expect to use it whenever an `std::function` fits well. That said, it's most
likely even a better fit than an `std::function` in a lot of cases, so expect to
use it quite a lot anyway.
The interface is trivial. It offers a default constructor to create empty
delegates:
```cpp
entt::delegate<int(int)> delegate{};
```
What is needed to create an instance is to specify the type of the function the
delegate _accepts_, that is the signature of the functions it models.<br/>
However, attempting to use an empty delegate by invoking its function call
operator results in undefined behavior or most likely a crash.
There exist a few overloads of the `connect` member function to initialize a
delegate:
```cpp
int f(int i) { return i; }
struct my_struct {
    int f(const int &i) const { return i; }
};
// bind a free function to the delegate
delegate.connect<&f>();
// bind a member function to the delegate
my_struct instance;
delegate.connect<&my_struct::f>(instance);
```
The delegate class also accepts data members, if needed. In this case, the
function type of the delegate is such that the parameter list is empty and the
value of the data member is at least convertible to the return type.
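As an example, here is a minimal sketch of a data member bound to a delegate (the `my_counter` type below is made up for the example):
```cpp
struct my_counter { int value{42}; };

// empty parameter list, return type convertible from the member type
entt::delegate<int()> getter;

my_counter counter;
getter.connect<&my_counter::value>(counter);

const int current = getter(); // 42
```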
Free functions having type equivalent to `void(T &, args...)` are accepted as
well. The first argument `T &` is considered a payload and the function will
receive it back every time it's invoked. In other terms, this works just fine
with the above definition:
```cpp
void g(const char &c, int i) { /* ... */ }
const char c = 'c';
delegate.connect<&g>(c);
delegate(42);
```
The function `g` is invoked with a reference to `c` and `42`. However, the
function type of the delegate is still `void(int)`. This is also the signature
of its function call operator.<br/>
Another interesting aspect of the delegate class is that it accepts functions
with a list of parameters that is shorter than that of its function type:
```cpp
void g() { /* ... */ }
delegate.connect<&g>();
delegate(42);
```
Where the function type of the delegate is `void(int)` as above. It goes without
saying that the extra arguments are silently discarded internally.<br/>
This is a nice-to-have feature in a lot of cases, as an example when the
`delegate` class is used as a building block of a signal-slot system.
To create and initialize a delegate at once, there are a few specialized
constructors. Because of the rules of the language, the listener is provided by
means of the `entt::connect_arg` variable template:
```cpp
entt::delegate<int(int)> func{entt::connect_arg<&f>};
```
Aside from `connect`, a `disconnect` counterpart isn't provided. Instead, there
exists a `reset` member function to use to clear a delegate.<br/>
To know if a delegate is empty, it can be used explicitly in every conditional
statement:
```cpp
if(delegate) {
// ...
}
```
Finally, to invoke a delegate, the function call operator is the way to go as
already shown in the examples above:
```cpp
auto ret = delegate(42);
```
In all cases, listeners don't have to strictly follow the signature of the
delegate. As long as a listener can be invoked with the given arguments to yield
a result that is convertible to the given result type, everything works just
fine.
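For instance, a sketch of a listener whose signature differs from that of the delegate but remains compatible (the function below is made up for the example):
```cpp
short narrow(long value) { return static_cast<short>(value); }

entt::delegate<int(int)> compatible;

// int converts to long on the way in, short converts to int on the way out
compatible.connect<&narrow>();
const int result = compatible(3);
```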
As a side note, members of classes may or may not be associated with instances.
If they are not, the first argument of the function type must be that of the
class on which the members operate and an instance of this class must obviously
be passed when invoking the delegate:
```cpp
entt::delegate<void(my_struct &, int)> delegate;
delegate.connect<&my_struct::f>();
my_struct instance;
delegate(instance, 42);
```
In this case, it's not possible to _deduce_ the function type since the first
argument doesn't necessarily have to be a reference (for example, it can be a
pointer, as well as a const reference).<br/>
Therefore, the function type must be declared explicitly for unbound members.
## Runtime arguments
The `delegate` class is meant to be used primarily with template arguments.
However, as a consequence of its design, it also offers minimal support for
runtime arguments.<br/>
When used like this, some features aren't supported though. In particular:
* Curried functions aren't accepted.
* Functions with an argument list that differs from that of the delegate aren't
supported.
* Return type and types of arguments **must** coincide with those of the
delegate and _being at least convertible_ isn't enough anymore.
Moreover, for a given function type `Ret(Args...)`, the signature of the
functions connected at runtime must necessarily be `Ret(const void *, Args...)`.
Runtime arguments can be passed both to the constructor of a delegate and to the
`connect` member function. An optional parameter is also accepted in both cases.
This argument is used to pass arbitrary user data back and forth as a
`const void *` upon invocation.<br/>
To connect a function to a delegate _in the hard way_:
```cpp
int func(const void *ptr, int i) { return *static_cast<const int *>(ptr) * i; }
const int value = 42;
// use the constructor ...
entt::delegate delegate{&func, &value};
// ... or the connect member function
delegate.connect(&func, &value);
```
The type of the delegate is deduced from the function if possible. In this case,
since the first argument is an implementation detail, the deduced function type
is `int(int)`.<br/>
Invoking a delegate built in this way follows the same rules as previously
explained.
## Lambda support
In general, the `delegate` class doesn't fully support lambda functions in all
their nuances. The reason is pretty simple: a `delegate` isn't a drop-in
replacement for an `std::function`. Instead, it tries to overcome the problems
with the latter.<br/>
That being said, non-capturing lambda functions are supported, even though some
features aren't available in this case.
This is a logical consequence of the support for connecting functions at
runtime. Therefore, lambda functions undergo the same rules and
limitations.<br/>
In fact, since non-capturing lambda functions decay to pointers to functions,
they can be used with a `delegate` as if they were _normal functions_ with
optional payload:
```cpp
my_struct instance;
// use the constructor ...
entt::delegate delegate{+[](const void *ptr, int value) {
return static_cast<const my_struct *>(ptr)->f(value);
}, &instance};
// ... or the connect member function
delegate.connect([](const void *ptr, int value) {
return static_cast<const my_struct *>(ptr)->f(value);
}, &instance);
```
As above, the first parameter (`const void *`) isn't part of the function type
of the delegate and is used to dispatch arbitrary user data back and forth. In
other terms, the function type of the delegate above is `int(int)`.
# Signals
Signal handlers work with references to classes, function pointers and pointers
to members. Listeners can be any kind of objects and users are in charge of
connecting and disconnecting them from a signal to avoid crashes due to
different lifetimes. On the other side, performance shouldn't be affected that
much by the presence of such a signal handler.<br/>
Signals make use of delegates internally and therefore they undergo the same
rules and offer similar functionalities. It may be a good idea to consult the
documentation of the `delegate` class for further information.
A signal handler can be used as a private data member without exposing any
_publish_ functionality to the clients of a class.<br/>
The basic idea is to impose a clear separation between the signal itself and the
`sink` class, that is a tool to be used to connect and disconnect listeners on
the fly.
The API of a signal handler is straightforward. If a collector is supplied to
the signal when something is published, all the values returned by its listeners
are literally _collected_ and used later by the caller. Otherwise, the class
works just like a plain signal that emits events from time to time.<br/>
To create instances of signal handlers it's sufficient to provide the type of
function to which they refer:
```cpp
entt::sigh<void(int, char)> signal;
```
Signals offer all the basic functionalities required to know how many listeners
they contain (`size`) or if they contain at least a listener (`empty`), as well
as a function to use to swap handlers (`swap`).
Besides them, there are member functions to use both to connect and disconnect
listeners in all their forms by means of a sink:
```cpp
void foo(int, char) { /* ... */ }
struct listener {
    void bar(const int &, char) { /* ... */ }
};
// ...
entt::sink sink{signal};
listener instance;
sink.connect<&foo>();
sink.connect<&listener::bar>(instance);
// ...
// disconnects a free function
sink.disconnect<&foo>();
// disconnect a member function of an instance
sink.disconnect<&listener::bar>(instance);
// disconnect all member functions of an instance, if any
sink.disconnect(instance);
// discards all listeners at once
sink.disconnect();
```
As shown above, listeners don't have to strictly follow the signature of the
signal. As long as a listener can be invoked with the given arguments to yield a
result that is convertible to the given return type, everything works just
fine.<br/>
It's also possible to connect a listener before other elements already contained
by the signal. The `before` function returns a `sink` object that is correctly
initialized for the purpose and can be used to connect one or more listeners in
order and in the desired position:
```cpp
sink.before<&foo>().connect<&listener::bar>(instance);
```
In all cases, the `connect` member function returns by default a `connection`
object to be used as an alternative to break a connection by means of its
`release` member function.<br/>
A `scoped_connection` can also be created from a connection. In this case, the
link is broken automatically as soon as the object goes out of scope.
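A short sketch of both approaches, reusing the listeners connected above (the scope below is introduced only to show when the automatic disconnection takes place):
```cpp
entt::connection conn = sink.connect<&foo>();

{
    entt::scoped_connection scoped{sink.connect<&listener::bar>(instance)};
    // listener::bar stays connected within this scope only
}

// explicitly break the remaining connection
conn.release();
```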
Once listeners are attached (or even if there are no listeners at all), events
and data in general are published through a signal by means of the `publish`
member function:
```cpp
signal.publish(42, 'c');
```
To collect data, the `collect` member function is used instead:
```cpp
int f() { return 0; }
int g() { return 1; }
// ...
entt::sigh<int()> signal;
entt::sink sink{signal};
sink.connect<&f>();
sink.connect<&g>();
std::vector<int> vec{};
signal.collect([&vec](int value) { vec.push_back(value); });
assert(vec[0] == 0);
assert(vec[1] == 1);
```
A collector must expose a function operator that accepts as an argument a type
to which the return type of the listeners can be converted. Moreover, it can
optionally return a boolean value that is true to stop collecting data, false
otherwise. This way one can avoid calling all the listeners in case it isn't
necessary.<br/>
Functors can also be used in place of a lambda. Since the collector is copied
when invoking the `collect` member function, `std::ref` is the way to go in this
case:
```cpp
struct my_collector {
    std::vector<int> vec{};

    bool operator()(int v) {
        vec.push_back(v);
        return true;
    }
};
// ...
my_collector collector;
signal.collect(std::ref(collector));
```
# Event dispatcher
The event dispatcher class allows users to trigger immediate events or to queue
and publish them all together later.<br/>
This class lazily instantiates its queues. Therefore, it's not necessary to
_announce_ the event types in advance:
```cpp
// define a general purpose dispatcher
entt::dispatcher dispatcher{};
```
A listener registered with a dispatcher is such that its type offers one or more
member functions that take arguments of type `Event &` for any type of event,
regardless of the return value.<br/>
These functions are linked directly via `connect` to a _sink_:
```cpp
struct an_event { int value; };
struct another_event {};
struct listener {
    void receive(const an_event &) { /* ... */ }
    void method(const another_event &) { /* ... */ }
};
// ...
listener listener;
dispatcher.sink<an_event>().connect<&listener::receive>(listener);
dispatcher.sink<another_event>().connect<&listener::method>(listener);
```
Note that connecting listeners within event handlers can result in undefined
behavior.<br/>
The `disconnect` member function is used to remove one listener at a time or all
of them at once:
```cpp
dispatcher.sink<an_event>().disconnect<&listener::receive>(listener);
dispatcher.sink<another_event>().disconnect(listener);
```
The `trigger` member function serves the purpose of sending an immediate event
to all the listeners registered so far:
```cpp
dispatcher.trigger(an_event{42});
dispatcher.trigger<another_event>();
```
Listeners are invoked immediately, although the order of execution isn't guaranteed. This
method can be used to push around urgent messages like an _is terminating_
notification on a mobile app.
On the other hand, the `enqueue` member function queues messages together and
helps to maintain control over the moment they are sent to listeners:
```cpp
dispatcher.enqueue<an_event>(42);
dispatcher.enqueue(another_event{});
```
Events are stored aside until the `update` member function is invoked:
```cpp
// emits all the events of the given type at once
dispatcher.update<an_event>();
// emits all the events queued so far at once
dispatcher.update();
```
This way users can embed the dispatcher in a loop and literally dispatch events
once per tick to their systems.
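A minimal sketch of such a loop (the surrounding game loop and its exit condition are assumptions made for the example):
```cpp
bool running = true;

while(running) {
    // systems enqueue events while they run
    dispatcher.enqueue<an_event>(42);

    // ... running is eventually set to false elsewhere

    // deliver everything queued during this tick
    dispatcher.update();
}
```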
## Named queues
All queues within a dispatcher are associated by default with an event type and
then retrieved from it.<br/>
However, it's possible to create queues with different _names_ (and therefore
also multiple queues for a single type). In fact, almost all member functions
accept an additional parameter for this purpose. As an example:
```cpp
dispatcher.sink<an_event>("custom"_hs).connect<&listener::receive>(listener);
```
In this case, the term _name_ is a bit of a misnomer, as these are actually numeric identifiers
of type `id_type`.<br/>
An exception to this rule is the `enqueue` function. There is no additional
parameter for it but rather a different function:
```cpp
dispatcher.enqueue_hint<an_event>("custom"_hs, 42);
```
This is mainly due to the template argument deduction rules and unfortunately
there is no real (elegant) way to avoid it.
# Event emitter
The event emitter is a general purpose class designed mainly for those cases
where asynchronous operations come into play.<br/>
Originally designed to fit the requirements of
[`uvw`](https://github.com/skypjack/uvw) (a wrapper for `libuv` written in
modern C++), it was adapted later to be included in this library.
To create an emitter type, derived classes must inherit from the base as:
```cpp
struct my_emitter: entt::emitter<my_emitter> {
    // ...
};
```
Handlers for the different events are created internally on the fly. It's not
required to specify in advance the full list of accepted events.<br/>
Moreover, whenever an event is published, an emitter also passes a reference
to itself to its listeners.
To create new instances of an emitter, no arguments are required:
```cpp
my_emitter emitter{};
```
Listeners are movable and callable objects (free functions, lambdas, functors,
`std::function`s, whatever) whose function type is compatible with:
```cpp
void(Type &, my_emitter &)
```
Where `Type` is the type of event they want to receive.<br/>
To attach a listener to an emitter, there exists the `on` member function:
```cpp
emitter.on<my_event>([](const my_event &event, my_emitter &emitter) {
// ...
});
```
Similarly, the `erase` member function is used to disconnect the listeners for a
given type while `clear` is used to disconnect all listeners at once:
```cpp
// erases the listeners registered for my_event
emitter.erase<my_event>();

// erases all listeners at once
emitter.clear();
```
To send an event to the listener registered on a given type, the `publish`
function is the way to go:
```cpp
struct my_event { int i; };
// ...
emitter.publish(my_event{42});
```
Finally, the `empty` member function tests whether at least one listener is
registered with the event emitter, while `contains` is used to check if a given
event type is associated with a valid listener:
```cpp
if(emitter.contains<my_event>()) {
// ...
}
```
This class introduces a _nice-to-have_ model based on events and listeners.<br/>
More generally, it's a handy tool when the derived classes _wrap_ asynchronous
operations but it's not limited to such uses.

107
external/entt/entt/docs/md/unreal.md vendored Normal file
View File

@ -0,0 +1,107 @@
# EnTT and Unreal Engine
<!--
@cond TURN_OFF_DOXYGEN
-->
# Table of Contents
* [Enable Cpp17](#enable-cpp17)
* [EnTT as a third party module](#entt-as-a-third-party-module)
* [Include EnTT](#include-entt)
<!--
@endcond TURN_OFF_DOXYGEN
-->
## Enable Cpp17
As of writing (Unreal Engine v4.25), the default C++ standard of Unreal Engine
is C++14.<br/>
On the other hand, note that `EnTT` requires C++17 to compile. To enable it, in
the main module of the project there should be a `<Game Name>.Build.cs` file,
the constructor of which must contain the following lines:
```cs
PCHUsage = PCHUsageMode.NoSharedPCHs;
PrivatePCHHeaderFile = "<PCH filename>.h";
CppStandard = CppStandardVersion.Cpp17;
```
Replace `<PCH filename>.h` with the name of the already existing PCH header
file, if any.<br/>
In case the project doesn't already contain a file of this type, it's possible
to create one with the following content:
```cpp
#pragma once
#include "CoreMinimal.h"
```
Remember to remove any old `PCHUsage = <...>` line that was previously there. At
this point, C++17 support should be in place.<br/>
Try to compile the project to ensure it works as expected before following
further steps.
Note that updating a *project* to C++17 doesn't necessarily mean that the IDE in
use will also start to recognize its syntax.<br/>
If the plan is to use C++17 in the project too, check the specific instructions
for the IDE in use.
## EnTT as a third party module
Once this point is reached, the `Source` directory should look like this:
```
Source
|   MyGame.Target.cs
|   MyGameEditor.Target.cs
|
+---MyGame
|   |   MyGame.Build.cs
|   |   MyGame.h (PCH Header file)
|
\---ThirdParty
    \---EnTT
        |   EnTT.Build.cs
        |
        \---entt (GitHub repository content inside)
```
To make this happen, create the folder `ThirdParty` under `Source` if it doesn't
exist already. Then, add an `EnTT` folder under `ThirdParty`.<br/>
Within the latter, create a new file `EnTT.Build.cs` with the following content:
```cs
using System.IO;
using UnrealBuildTool;
public class EnTT: ModuleRules {
    public EnTT(ReadOnlyTargetRules Target) : base(Target) {
        Type = ModuleType.External;
        PublicIncludePaths.Add(Path.Combine(ModuleDirectory, "entt", "src", "entt"));
    }
}
```
The last line indicates that the actual files will be found in the directory
`EnTT/entt/src/entt`.<br/>
Download the repository for `EnTT` and place it next to `EnTT.Build.cs` or
update the path above accordingly.
Finally, open the `<Game Name>.Build.cs` file and add `EnTT` as a dependency at
the end of the list:
```cs
PublicDependencyModuleNames.AddRange(new[] {
"Core", "CoreUObject", "Engine", "InputCore", [...], "EnTT"
});
```
Note that some IDEs might require a restart to start recognizing the new module
for code-highlighting features and such.
## Include EnTT
In any source file of the project, add `#include "entt.hpp"` or any other path
to the file from `EnTT` to use it.<br/>
Try to create a registry as `entt::registry registry;` to make sure everything
compiles fine.
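As a final sanity check, here is a minimal sketch of a function that exercises the registry (its name and the component used are made up for the example):
```cpp
#include "entt.hpp"

void EnTTSmokeTest() {
    entt::registry registry;

    const entt::entity entity = registry.create();
    registry.emplace<int>(entity, 42);
}
```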

34
external/entt/entt/entt.imp vendored Normal file
View File

@ -0,0 +1,34 @@
[
{ "include": [ "@<gtest/internal/.*>", "private", "<gtest/gtest.h>", "public" ] },
{ "include": [ "@<gtest/gtest-.*>", "private", "<gtest/gtest.h>", "public" ] },
{ "include": [ "@[\"<].*/container/fwd.hpp[\">]", "private", "<entt/container/dense_map.hpp>", "public" ] },
{ "include": [ "@[\"<].*/container/fwd.hpp[\">]", "private", "<entt/container/dense_set.hpp>", "public" ] },
{ "include": [ "@[\"<].*/core/fwd.hpp[\">]", "private", "<entt/core/any.hpp>", "public" ] },
{ "include": [ "@[\"<].*/core/fwd.hpp[\">]", "private", "<entt/core/family.hpp>", "public" ] },
{ "include": [ "@[\"<].*/core/fwd.hpp[\">]", "private", "<entt/core/hashed_string.hpp>", "public" ] },
{ "include": [ "@[\"<].*/core/fwd.hpp[\">]", "private", "<entt/core/ident.hpp>", "public" ] },
{ "include": [ "@[\"<].*/core/fwd.hpp[\">]", "private", "<entt/core/monostate.hpp>", "public" ] },
{ "include": [ "@[\"<].*/core/fwd.hpp[\">]", "private", "<entt/core/type_info.hpp>", "public" ] },
{ "include": [ "@[\"<].*/core/fwd.hpp[\">]", "private", "<entt/core/type_traits.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/entity.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/group.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/handle.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/helper.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/observer.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/organizer.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/registry.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/runtime_view.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/snapshot.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/sparse_set.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/storage.hpp>", "public" ] },
{ "include": [ "@[\"<].*/entity/fwd.hpp[\">]", "private", "<entt/entity/view.hpp>", "public" ] },
{ "include": [ "@[\"<].*/meta/fwd.hpp[\">]", "private", "<entt/meta/meta.hpp>", "public" ] },
{ "include": [ "@[\"<].*/poly/fwd.hpp[\">]", "private", "<entt/poly/poly.hpp>", "public" ] },
{ "include": [ "@[\"<].*/resource/fwd.hpp[\">]", "private", "<entt/resource/cache.hpp>", "public" ] },
{ "include": [ "@[\"<].*/resource/fwd.hpp[\">]", "private", "<entt/resource/loader.hpp>", "public" ] },
{ "include": [ "@[\"<].*/resource/fwd.hpp[\">]", "private", "<entt/resource/resource.hpp>", "public" ] },
{ "include": [ "@[\"<].*/signal/fwd.hpp[\">]", "private", "<entt/signal/delegate.hpp>", "public" ] },
{ "include": [ "@[\"<].*/signal/fwd.hpp[\">]", "private", "<entt/signal/dispatcher.hpp>", "public" ] },
{ "include": [ "@[\"<].*/signal/fwd.hpp[\">]", "private", "<entt/signal/emitter.hpp>", "public" ] },
{ "include": [ "@[\"<].*/signal/fwd.hpp[\">]", "private", "<entt/signal/sigh.hpp>", "public" ] }
]

View File

@ -0,0 +1,3 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
</AutoVisualizer>

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="entt::dense_map&lt;*&gt;">
<Intrinsic Name="size" Expression="packed.first_base::value.size()"/>
<Intrinsic Name="bucket_count" Expression="sparse.first_base::value.size()"/>
<DisplayString>{{ size={ size() } }}</DisplayString>
<Expand>
<Item Name="[capacity]" ExcludeView="simple">packed.first_base::value.capacity()</Item>
<Item Name="[bucket_count]" ExcludeView="simple">bucket_count()</Item>
<Item Name="[load_factor]" ExcludeView="simple">(float)size() / (float)bucket_count()</Item>
<Item Name="[max_load_factor]" ExcludeView="simple">threshold</Item>
<IndexListItems>
<Size>size()</Size>
<ValueNode>packed.first_base::value[$i].element</ValueNode>
</IndexListItems>
</Expand>
</Type>
<Type Name="entt::dense_set&lt;*&gt;">
<Intrinsic Name="size" Expression="packed.first_base::value.size()"/>
<Intrinsic Name="bucket_count" Expression="sparse.first_base::value.size()"/>
<DisplayString>{{ size={ size() } }}</DisplayString>
<Expand>
<Item Name="[capacity]" ExcludeView="simple">packed.first_base::value.capacity()</Item>
<Item Name="[bucket_count]" ExcludeView="simple">bucket_count()</Item>
<Item Name="[load_factor]" ExcludeView="simple">(float)size() / (float)bucket_count()</Item>
<Item Name="[max_load_factor]" ExcludeView="simple">threshold</Item>
<IndexListItems>
<Size>size()</Size>
<ValueNode>packed.first_base::value[$i].second</ValueNode>
</IndexListItems>
</Expand>
</Type>
</AutoVisualizer>

View File

@ -0,0 +1,32 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="entt::basic_any&lt;*&gt;">
<DisplayString>{{ type={ info->alias,na }, policy={ mode,en } }}</DisplayString>
</Type>
<Type Name="entt::compressed_pair&lt;*&gt;">
<Intrinsic Name="first" Optional="true" Expression="((first_base*)this)->value"/>
<Intrinsic Name="first" Optional="true" Expression="*(first_base::base_type*)this"/>
<Intrinsic Name="second" Optional="true" Expression="((second_base*)this)->value"/>
<Intrinsic Name="second" Optional="true" Expression="*(second_base::base_type*)this"/>
<DisplayString >({ first() }, { second() })</DisplayString>
<Expand>
<Item Name="[first]">first()</Item>
<Item Name="[second]">second()</Item>
</Expand>
</Type>
<Type Name="entt::basic_hashed_string&lt;*&gt;">
<DisplayString Condition="base_type::repr != nullptr">{{ hash={ base_type::hash } }}</DisplayString>
<DisplayString>{{}}</DisplayString>
<Expand>
<Item Name="[data]">base_type::repr,na</Item>
<Item Name="[length]">base_type::length</Item>
</Expand>
</Type>
<Type Name="entt::type_info">
<DisplayString>{{ name={ alias,na } }}</DisplayString>
<Expand>
<Item Name="[hash]">identifier</Item>
<Item Name="[index]">seq</Item>
</Expand>
</Type>
</AutoVisualizer>

View File

@ -0,0 +1,153 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="entt::basic_registry&lt;*&gt;">
<Intrinsic Name="pools_size" Expression="pools.packed.first_base::value.size()"/>
<Intrinsic Name="vars_size" Expression="vars.ctx.packed.first_base::value.size()"/>
<Intrinsic Name="to_entity" Expression="*((entity_traits::entity_type *)&amp;entity) &amp; entity_traits::entity_mask">
<Parameter Name="entity" Type="entity_traits::value_type &amp;"/>
</Intrinsic>
<DisplayString>{{ size={ epool.size() } }}</DisplayString>
<Expand>
<Item IncludeView="simple" Name="[epool]">epool,view(simple)nr</Item>
<Synthetic Name="[epool]" ExcludeView="simple">
<DisplayString>{ epool.size() }</DisplayString>
<Expand>
<CustomListItems>
<Variable Name="pos" InitialValue="0" />
<Variable Name="last" InitialValue="epool.size()"/>
<Loop>
<Break Condition="pos == last"/>
<If Condition="to_entity(epool[pos]) == pos">
<Item Name="[{ pos }]">epool[pos]</Item>
</If>
<Exec>++pos</Exec>
</Loop>
</CustomListItems>
</Expand>
</Synthetic>
<Synthetic Name="[destroyed]" ExcludeView="simple">
<DisplayString>{ to_entity(free_list) != entity_traits::entity_mask }</DisplayString>
<Expand>
<CustomListItems>
<Variable Name="it" InitialValue="to_entity(free_list)" />
<Loop>
<Break Condition="it == entity_traits::entity_mask"/>
<Item Name="[{ it }]">epool[it]</Item>
<Exec>it = to_entity(epool[it])</Exec>
</Loop>
</CustomListItems>
</Expand>
</Synthetic>
<Synthetic Name="[pools]">
<DisplayString>{ pools_size() }</DisplayString>
<Expand>
<IndexListItems ExcludeView="simple">
<Size>pools_size()</Size>
<ValueNode>*pools.packed.first_base::value[$i].element.second</ValueNode>
</IndexListItems>
<IndexListItems IncludeView="simple">
<Size>pools_size()</Size>
<ValueNode>*pools.packed.first_base::value[$i].element.second,view(simple)</ValueNode>
</IndexListItems>
</Expand>
</Synthetic>
<Item Name="[groups]" ExcludeView="simple">groups.size()</Item>
<Synthetic Name="[vars]">
<DisplayString>{ vars_size() }</DisplayString>
<Expand>
<IndexListItems>
<Size>vars_size()</Size>
<ValueNode>vars.ctx.packed.first_base::value[$i].element.second</ValueNode>
</IndexListItems>
</Expand>
</Synthetic>
</Expand>
</Type>
<Type Name="entt::basic_sparse_set&lt;*&gt;">
<DisplayString>{{ size={ packed.size() }, type={ info->alias,na } }}</DisplayString>
<Expand>
<Item Name="[capacity]" ExcludeView="simple">packed.capacity()</Item>
<Item Name="[policy]">mode,en</Item>
<Synthetic Name="[sparse]">
<DisplayString>{ sparse.size() * entity_traits::page_size }</DisplayString>
<Expand>
<ExpandedItem IncludeView="simple">sparse,view(simple)</ExpandedItem>
<CustomListItems ExcludeView="simple">
<Variable Name="pos" InitialValue="0"/>
<Variable Name="page" InitialValue="0"/>
<Variable Name="offset" InitialValue="0"/>
<Variable Name="last" InitialValue="sparse.size() * entity_traits::page_size"/>
<Loop>
<Break Condition="pos == last"/>
<Exec>page = pos / entity_traits::page_size</Exec>
<Exec>offset = pos &amp; (entity_traits::page_size - 1)</Exec>
<If Condition="sparse[page] &amp;&amp; (*((entity_traits::entity_type *)&amp;sparse[page][offset]) &lt; ~entity_traits::entity_mask)">
<Item Name="[{ pos }]">*((entity_traits::entity_type *)&amp;sparse[page][offset]) &amp; entity_traits::entity_mask</Item>
</If>
<Exec>++pos</Exec>
</Loop>
</CustomListItems>
</Expand>
</Synthetic>
<Synthetic Name="[packed]">
<DisplayString>{ packed.size() }</DisplayString>
<Expand>
<ExpandedItem IncludeView="simple">packed,view(simple)</ExpandedItem>
<CustomListItems ExcludeView="simple">
<Variable Name="pos" InitialValue="0"/>
<Variable Name="last" InitialValue="packed.size()"/>
<Loop>
<Break Condition="pos == last"/>
<If Condition="*((entity_traits::entity_type *)&amp;packed[pos]) &lt; ~entity_traits::entity_mask">
<Item Name="[{ pos }]">packed[pos]</Item>
</If>
<Exec>++pos</Exec>
</Loop>
</CustomListItems>
</Expand>
</Synthetic>
</Expand>
</Type>
<Type Name="entt::basic_storage&lt;*&gt;">
<DisplayString>{{ size={ base_type::packed.size() }, type={ base_type::info->alias,na } }}</DisplayString>
<Expand>
<Item Name="[capacity]" Optional="true" ExcludeView="simple">packed.first_base::value.capacity() * comp_traits::page_size</Item>
<Item Name="[page size]" Optional="true" ExcludeView="simple">comp_traits::page_size</Item>
<Item Name="[base]" ExcludeView="simple">(base_type*)this,nand</Item>
<Item Name="[base]" IncludeView="simple">(base_type*)this,view(simple)nand</Item>
<!-- having SFINAE-like techniques in natvis is priceless :) -->
<CustomListItems Condition="packed.first_base::value.size() != 0" Optional="true">
<Variable Name="pos" InitialValue="0" />
<Variable Name="last" InitialValue="base_type::packed.size()"/>
<Loop>
<Break Condition="pos == last"/>
<If Condition="*((base_type::entity_traits::entity_type *)&amp;base_type::packed[pos]) &lt; ~base_type::entity_traits::entity_mask">
<Item Name="[{ pos }:{ base_type::packed[pos] }]">packed.first_base::value[pos / comp_traits::page_size][pos &amp; (comp_traits::page_size - 1)]</Item>
</If>
<Exec>++pos</Exec>
</Loop>
</CustomListItems>
</Expand>
</Type>
<Type Name="entt::basic_view&lt;*&gt;">
<DisplayString>{{ size_hint={ view->packed.size() } }}</DisplayString>
<Expand>
<Item Name="[pools]">pools,na</Item>
<Item Name="[filter]">filter,na</Item>
</Expand>
</Type>
<Type Name="entt::basic_runtime_view&lt;*&gt;">
<DisplayString Condition="pools.size() != 0u">{{ size_hint={ pools[0]->packed.size() } }}</DisplayString>
<DisplayString>{{ size_hint=0 }}</DisplayString>
<Expand>
<Item Name="[pools]">pools,na</Item>
<Item Name="[filter]">filter,na</Item>
</Expand>
</Type>
<Type Name="entt::null_t">
<DisplayString>&lt;null&gt;</DisplayString>
</Type>
<Type Name="entt::tombstone_t">
<DisplayString>&lt;tombstone&gt;</DisplayString>
</Type>
</AutoVisualizer>

View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="entt::adjacency_matrix&lt;*&gt;">
<DisplayString>{{ size={ vert } }}</DisplayString>
<Expand>
<CustomListItems>
<Variable Name="pos" InitialValue="0" />
<Variable Name="last" InitialValue="vert * vert"/>
<Loop>
<Break Condition="pos == last"/>
<If Condition="matrix[pos] != 0u">
<Item Name="{pos / vert}">pos % vert</Item>
</If>
<Exec>++pos</Exec>
</Loop>
</CustomListItems>
</Expand>
</Type>
</AutoVisualizer>

View File

@ -0,0 +1,3 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
</AutoVisualizer>

View File

@ -0,0 +1,121 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="entt::internal::meta_base_node">
<DisplayString>{{}}</DisplayString>
</Type>
<Type Name="entt::internal::meta_conv_node">
<DisplayString>{{}}</DisplayString>
</Type>
<Type Name="entt::internal::meta_ctor_node">
<DisplayString>{{ arity={ arity } }}</DisplayString>
</Type>
<Type Name="entt::internal::meta_data_node">
<Intrinsic Name="has_property" Expression="!!(traits &amp; property)">
<Parameter Name="property" Type="int"/>
</Intrinsic>
<DisplayString>{{ arity={ arity } }}</DisplayString>
<Expand>
<Item Name="[arity]">arity</Item>
<Item Name="[is_const]">has_property(entt::internal::meta_traits::is_const)</Item>
<Item Name="[is_static]">has_property(entt::internal::meta_traits::is_static)</Item>
<Item Name="[prop]">prop</Item>
</Expand>
</Type>
<Type Name="entt::internal::meta_func_node" >
<Intrinsic Name="has_property" Expression="!!(traits &amp; property)">
<Parameter Name="property" Type="int"/>
</Intrinsic>
<DisplayString>{{ arity={ arity } }}</DisplayString>
<Expand>
<Item Name="[is_const]">has_property(entt::internal::meta_traits::is_const)</Item>
<Item Name="[is_static]">has_property(entt::internal::meta_traits::is_static)</Item>
<Item Name="[next]" Condition="next != nullptr">*next</Item>
<Item Name="[prop]">prop</Item>
</Expand>
</Type>
<Type Name="entt::internal::meta_prop_node">
<DisplayString>{ value }</DisplayString>
</Type>
<Type Name="entt::internal::meta_template_node">
<DisplayString>{{ arity={ arity } }}</DisplayString>
</Type>
<Type Name="entt::internal::meta_type_node">
<Intrinsic Name="has_property" Expression="!!(traits &amp; property)">
<Parameter Name="property" Type="int"/>
</Intrinsic>
<DisplayString Condition="info != nullptr">{{ type={ info->alias,na } }}</DisplayString>
<DisplayString>{{}}</DisplayString>
<Expand>
<Item Name="[id]">id</Item>
<Item Name="[sizeof]">size_of</Item>
<Item Name="[is_arithmetic]">has_property(entt::internal::meta_traits::is_arithmetic)</Item>
<Item Name="[is_integral]">has_property(entt::internal::meta_traits::is_integral)</Item>
<Item Name="[is_signed]">has_property(entt::internal::meta_traits::is_signed)</Item>
<Item Name="[is_array]">has_property(entt::internal::meta_traits::is_array)</Item>
<Item Name="[is_enum]">has_property(entt::internal::meta_traits::is_enum)</Item>
<Item Name="[is_class]">has_property(entt::internal::meta_traits::is_class)</Item>
<Item Name="[is_meta_pointer_like]">has_property(entt::internal::meta_traits::is_meta_pointer_like)</Item>
<Item Name="[is_meta_sequence_container]">has_property(entt::internal::meta_traits::is_meta_sequence_container)</Item>
<Item Name="[is_meta_associative_container]">has_property(entt::internal::meta_traits::is_meta_associative_container)</Item>
<Item Name="[default_constructor]">default_constructor != nullptr</Item>
<Item Name="[conversion_helper]">conversion_helper != nullptr</Item>
<Item Name="[from_void]">from_void != nullptr</Item>
<Item Name="[template_info]">templ</Item>
<Item Name="[details]" Condition="details != nullptr">*details</Item>
</Expand>
</Type>
<Type Name="entt::meta_any">
<DisplayString Condition="node.info != nullptr">{{ type={ node.info->alias,na }, policy={ storage.mode,en } }}</DisplayString>
<DisplayString>{{}}</DisplayString>
<Expand>
<ExpandedItem>node</ExpandedItem>
<Item Name="[context]" Condition="ctx != nullptr">ctx->value</Item>
</Expand>
</Type>
<Type Name="entt::meta_handle">
<DisplayString>{ any }</DisplayString>
</Type>
<Type Name="entt::meta_associative_container">
<DisplayString>{ storage }</DisplayString>
<Expand>
<Item Name="[context]" Condition="ctx != nullptr">ctx->value</Item>
</Expand>
</Type>
<Type Name="entt::meta_sequence_container">
<DisplayString>{ storage }</DisplayString>
<Expand>
<Item Name="[context]" Condition="ctx != nullptr">ctx->value</Item>
</Expand>
</Type>
<Type Name="entt::meta_data">
<DisplayString Condition="node != nullptr">{ *node }</DisplayString>
<DisplayString>{{}}</DisplayString>
<Expand>
<ExpandedItem Condition="node != nullptr">node</ExpandedItem>
<Item Name="[context]" Condition="ctx != nullptr">ctx->value</Item>
</Expand>
</Type>
<Type Name="entt::meta_func">
<DisplayString Condition="node != nullptr">{ *node }</DisplayString>
<DisplayString>{{}}</DisplayString>
<Expand>
<ExpandedItem Condition="node != nullptr">node</ExpandedItem>
<Item Name="[context]" Condition="ctx != nullptr">ctx->value</Item>
</Expand>
</Type>
<Type Name="entt::meta_prop">
<DisplayString Condition="node != nullptr">{ *node }</DisplayString>
<DisplayString>{{}}</DisplayString>
<Expand>
<ExpandedItem Condition="node != nullptr">node</ExpandedItem>
<Item Name="[context]" Condition="ctx != nullptr">ctx->value</Item>
</Expand>
</Type>
<Type Name="entt::meta_type">
<DisplayString>{ node }</DisplayString>
<Expand>
<ExpandedItem>node</ExpandedItem>
<Item Name="[context]" Condition="ctx != nullptr">ctx->value</Item>
</Expand>
</Type>
</AutoVisualizer>

View File

@ -0,0 +1,3 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
</AutoVisualizer>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="entt::basic_poly&lt;*&gt;">
<DisplayString>{ storage }</DisplayString>
</Type>
</AutoVisualizer>

View File

@ -0,0 +1,3 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
</AutoVisualizer>

View File

@ -0,0 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="entt::resource&lt;*&gt;">
<DisplayString>{ value }</DisplayString>
<Expand>
<ExpandedItem>value</ExpandedItem>
</Expand>
</Type>
<Type Name="entt::resource_cache&lt;*&gt;">
<DisplayString>{ pool.first_base::value }</DisplayString>
<Expand>
<ExpandedItem>pool.first_base::value</ExpandedItem>
</Expand>
</Type>
</AutoVisualizer>

@@ -0,0 +1,56 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="entt::delegate&lt;*&gt;">
<DisplayString>{{ type={ "$T1" } }}</DisplayString>
<Expand>
<Item Name="[empty]">fn == nullptr</Item>
<Item Name="[data]">instance</Item>
</Expand>
</Type>
<Type Name="entt::basic_dispatcher&lt;*&gt;">
<Intrinsic Name="size" Expression="pools.first_base::value.packed.first_base::value.size()"/>
<DisplayString>{{ size={ size() } }}</DisplayString>
<Expand>
<Synthetic Name="[pools]">
<DisplayString>{ size() }</DisplayString>
<Expand>
<IndexListItems>
<Size>size()</Size>
<ValueNode>*pools.first_base::value.packed.first_base::value[$i].element.second</ValueNode>
</IndexListItems>
</Expand>
</Synthetic>
</Expand>
</Type>
<Type Name="entt::internal::dispatcher_handler&lt;*&gt;">
<DisplayString>{{ size={ events.size() }, event={ "$T1" } }}</DisplayString>
<Expand>
<Item Name="[signal]">signal</Item>
</Expand>
</Type>
<Type Name="entt::emitter&lt;*&gt;">
<DisplayString>{{ size={ handlers.first_base::value.packed.first_base::value.size() } }}</DisplayString>
</Type>
<Type Name="entt::connection">
<DisplayString>{{ bound={ signal != nullptr } }}</DisplayString>
</Type>
<Type Name="entt::scoped_connection">
<DisplayString>{ conn }</DisplayString>
</Type>
<Type Name="entt::sigh&lt;*&gt;">
<DisplayString>{{ size={ calls.size() }, type={ "$T1" } }}</DisplayString>
<Expand>
<IndexListItems>
<Size>calls.size()</Size>
<ValueNode>calls[$i]</ValueNode>
</IndexListItems>
</Expand>
</Type>
<Type Name="entt::sink&lt;*&gt;">
<DisplayString>{{ type={ "$T1" } }}</DisplayString>
<Expand>
<Item Name="[signal]">signal,na</Item>
<Item Name="[offset]">offset</Item>
</Expand>
</Type>
</AutoVisualizer>
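For orientation, the expressions above read the members that back EnTT's signal tools: fn and instance on a delegate, calls on a sigh, signal and offset on a sink. A minimal, illustrative sketch of objects these visualizers would display, assuming the single-include <entt/entt.hpp> header (not part of the vendored sources):

    #include <entt/entt.hpp>
    #include <cassert>

    void on_event(int value) { assert(value == 42); }

    int main() {
        entt::sigh<void(int)> signal;  // listeners land in `calls`, sized by the natvis above
        entt::sink sink{signal};       // the visualizer shows its `signal` pointer and `offset`
        sink.connect<&on_event>();
        signal.publish(42);

        entt::delegate<void(int)> dgt; // `fn`/`instance` back the [empty]/[data] items
        dgt.connect<&on_event>();
        dgt(42);
    }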

external/entt/entt/scripts/amalgamate.py vendored Normal file (299 lines)
@@ -0,0 +1,299 @@
#!/usr/bin/env python
# coding=utf-8
# amalgamate.py - Amalgamate C source and header files.
# Copyright (c) 2012, Erik Edlund <erik.edlund@32767.se>
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of Erik Edlund, nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import datetime
import json
import os
import re
class Amalgamation(object):
# Prepends self.source_path to file_path if needed.
def actual_path(self, file_path):
if not os.path.isabs(file_path):
file_path = os.path.join(self.source_path, file_path)
return file_path
# Search included file_path in self.include_paths and
# in source_dir if specified.
def find_included_file(self, file_path, source_dir):
search_dirs = self.include_paths[:]
if source_dir:
search_dirs.insert(0, source_dir)
for search_dir in search_dirs:
search_path = os.path.join(search_dir, file_path)
if os.path.isfile(self.actual_path(search_path)):
return search_path
return None
def __init__(self, args):
with open(args.config, 'r') as f:
config = json.loads(f.read())
for key in config:
setattr(self, key, config[key])
self.verbose = args.verbose == "yes"
self.prologue = args.prologue
self.source_path = args.source_path
self.included_files = []
# Generate the amalgamation and write it to the target file.
def generate(self):
amalgamation = ""
if self.prologue:
with open(self.prologue, 'r') as f:
amalgamation += datetime.datetime.now().strftime(f.read())
if self.verbose:
print("Config:")
print(" target = {0}".format(self.target))
print(" working_dir = {0}".format(os.getcwd()))
print(" include_paths = {0}".format(self.include_paths))
print("Creating amalgamation:")
for file_path in self.sources:
# Do not check the include paths while processing the source
# list, all given source paths must be correct.
# actual_path = self.actual_path(file_path)
print(" - processing \"{0}\"".format(file_path))
t = TranslationUnit(file_path, self, True)
amalgamation += t.content
with open(self.target, 'w') as f:
f.write(amalgamation)
print("...done!\n")
if self.verbose:
print("Files processed: {0}".format(self.sources))
print("Files included: {0}".format(self.included_files))
print("")
# Returns True if the match is within list of other matches
def _is_within(match, matches):
for m in matches:
if match.start() > m.start() and \
match.end() < m.end():
return True
return False
class TranslationUnit(object):
# // C++ comment.
cpp_comment_pattern = re.compile(r"//.*?\n")
# /* C comment. */
c_comment_pattern = re.compile(r"/\*.*?\*/", re.S)
# "complex \"stri\\\ng\" value".
string_pattern = re.compile("[^']" r'".*?(?<=[^\\])"', re.S)
# Handle simple include directives. Support for advanced
# directives where macros and defines needs to expanded is
# not a concern right now.
include_pattern = re.compile(
r'#\s*include\s+(<|")(?P<path>.*?)("|>)', re.S)
# #pragma once
pragma_once_pattern = re.compile(r'#\s*pragma\s+once', re.S)
# Search for pattern in self.content, add the match to
# contexts if found and update the index accordingly.
def _search_content(self, index, pattern, contexts):
match = pattern.search(self.content, index)
if match:
contexts.append(match)
return match.end()
return index + 2
# Return all the skippable contexts, i.e., comments and strings
def _find_skippable_contexts(self):
# Find contexts in the content in which a found include
# directive should not be processed.
skippable_contexts = []
# Walk through the content char by char, and try to grab
# skippable contexts using regular expressions when found.
i = 1
content_len = len(self.content)
while i < content_len:
j = i - 1
current = self.content[i]
previous = self.content[j]
if current == '"':
# String value.
i = self._search_content(j, self.string_pattern,
skippable_contexts)
elif current == '*' and previous == '/':
# C style comment.
i = self._search_content(j, self.c_comment_pattern,
skippable_contexts)
elif current == '/' and previous == '/':
# C++ style comment.
i = self._search_content(j, self.cpp_comment_pattern,
skippable_contexts)
else:
# Skip to the next char.
i += 1
return skippable_contexts
# Removes pragma once from content
def _process_pragma_once(self):
content_len = len(self.content)
if content_len < len("#include <x>"):
return 0
# Find contexts in the content in which a found include
# directive should not be processed.
skippable_contexts = self._find_skippable_contexts()
pragmas = []
pragma_once_match = self.pragma_once_pattern.search(self.content)
while pragma_once_match:
if not _is_within(pragma_once_match, skippable_contexts):
pragmas.append(pragma_once_match)
pragma_once_match = self.pragma_once_pattern.search(self.content,
pragma_once_match.end())
# Handle all collected pragma once directives.
prev_end = 0
tmp_content = ''
for pragma_match in pragmas:
tmp_content += self.content[prev_end:pragma_match.start()]
prev_end = pragma_match.end()
tmp_content += self.content[prev_end:]
self.content = tmp_content
# Include all trivial #include directives into self.content.
def _process_includes(self):
content_len = len(self.content)
if content_len < len("#include <x>"):
return 0
# Find contexts in the content in which a found include
# directive should not be processed.
skippable_contexts = self._find_skippable_contexts()
# Search for include directives in the content, collect those
# which should be included into the content.
includes = []
include_match = self.include_pattern.search(self.content)
while include_match:
if not _is_within(include_match, skippable_contexts):
include_path = include_match.group("path")
search_same_dir = include_match.group(1) == '"'
found_included_path = self.amalgamation.find_included_file(
include_path, self.file_dir if search_same_dir else None)
if found_included_path:
includes.append((include_match, found_included_path))
include_match = self.include_pattern.search(self.content,
include_match.end())
# Handle all collected include directives.
prev_end = 0
tmp_content = ''
for include in includes:
include_match, found_included_path = include
tmp_content += self.content[prev_end:include_match.start()]
tmp_content += "// {0}\n".format(include_match.group(0))
if found_included_path not in self.amalgamation.included_files:
t = TranslationUnit(found_included_path, self.amalgamation, False)
tmp_content += t.content
prev_end = include_match.end()
tmp_content += self.content[prev_end:]
self.content = tmp_content
return len(includes)
# Make all content processing
def _process(self):
if not self.is_root:
self._process_pragma_once()
self._process_includes()
def __init__(self, file_path, amalgamation, is_root):
self.file_path = file_path
self.file_dir = os.path.dirname(file_path)
self.amalgamation = amalgamation
self.is_root = is_root
self.amalgamation.included_files.append(self.file_path)
actual_path = self.amalgamation.actual_path(file_path)
if not os.path.isfile(actual_path):
raise IOError("File not found: \"{0}\"".format(file_path))
with open(actual_path, 'r') as f:
self.content = f.read()
self._process()
def main():
description = "Amalgamate C source and header files."
usage = " ".join([
"amalgamate.py",
"[-v]",
"-c path/to/config.json",
"-s path/to/source/dir",
"[-p path/to/prologue.(c|h)]"
])
argsparser = argparse.ArgumentParser(
description=description, usage=usage)
argsparser.add_argument("-v", "--verbose", dest="verbose",
choices=["yes", "no"], metavar="", help="be verbose")
argsparser.add_argument("-c", "--config", dest="config",
required=True, metavar="", help="path to a JSON config file")
argsparser.add_argument("-s", "--source", dest="source_path",
required=True, metavar="", help="source code path")
argsparser.add_argument("-p", "--prologue", dest="prologue",
required=False, metavar="", help="path to a C prologue file")
amalgamation = Amalgamation(argsparser.parse_args())
amalgamation.generate()
if __name__ == "__main__":
main()

@@ -0,0 +1,8 @@
{
"project": "entt",
"target": "single_include/entt/entt.hpp",
"sources": [
"src/entt/entt.hpp"
],
"include_paths": ["src"]
}

external/entt/entt/scripts/update_homebrew.sh vendored Executable file (60 lines)
@@ -0,0 +1,60 @@
#!/bin/sh
# only argument should be the version to upgrade to
if [ $# != 1 ]
then
echo "Expected a version tag like v2.7.1"
exit 1
fi
VERSION="$1"
URL="https://github.com/skypjack/entt/archive/$VERSION.tar.gz"
FORMULA="entt.rb"
echo "Updating homebrew package to $VERSION"
echo "Cloning..."
git clone https://github.com/skypjack/homebrew-entt.git
if [ $? != 0 ]
then
exit 1
fi
cd homebrew-entt
# download the repo at the version
# exit with error messages if curl fails
echo "Curling..."
curl "$URL" --location --fail --silent --show-error --output archive.tar.gz
if [ $? != 0 ]
then
exit 1
fi
# compute sha256 hash
echo "Hashing..."
HASH="$(openssl sha256 archive.tar.gz | cut -d " " -f 2)"
# delete the archive
rm archive.tar.gz
echo "Sedding..."
# change the url in the formula file
# the slashes in the URL must be escaped
ESCAPED_URL="$(echo "$URL" | sed -e 's/[\/&]/\\&/g')"
sed -i -e '/url/s/".*"/"'$ESCAPED_URL'"/' $FORMULA
# change the hash in the formula file
sed -i -e '/sha256/s/".*"/"'$HASH'"/' $FORMULA
# delete temporary file created by sed
rm -rf "$FORMULA-e"
# update remote repo
echo "Gitting..."
git add entt.rb
git commit -m "Update to $VERSION"
git push origin master
# out of homebrew-entt dir
cd ..

File diff suppressed because it is too large.

@@ -0,0 +1,81 @@
#ifndef ENTT_CONFIG_CONFIG_H
#define ENTT_CONFIG_CONFIG_H
#include "version.h"
#if defined(__cpp_exceptions) && !defined(ENTT_NOEXCEPTION)
# define ENTT_CONSTEXPR
# define ENTT_THROW throw
# define ENTT_TRY try
# define ENTT_CATCH catch(...)
#else
# define ENTT_CONSTEXPR constexpr // use only with throwing functions (waiting for C++20)
# define ENTT_THROW
# define ENTT_TRY if(true)
# define ENTT_CATCH if(false)
#endif
#ifdef ENTT_USE_ATOMIC
# include <atomic>
# define ENTT_MAYBE_ATOMIC(Type) std::atomic<Type>
#else
# define ENTT_MAYBE_ATOMIC(Type) Type
#endif
#ifndef ENTT_ID_TYPE
# include <cstdint>
# define ENTT_ID_TYPE std::uint32_t
#endif
#ifndef ENTT_SPARSE_PAGE
# define ENTT_SPARSE_PAGE 4096
#endif
#ifndef ENTT_PACKED_PAGE
# define ENTT_PACKED_PAGE 1024
#endif
#ifdef ENTT_DISABLE_ASSERT
# undef ENTT_ASSERT
# define ENTT_ASSERT(condition, msg) (void(0))
#elif !defined ENTT_ASSERT
# include <cassert>
# define ENTT_ASSERT(condition, msg) assert(condition)
#endif
#ifdef ENTT_DISABLE_ASSERT
# undef ENTT_ASSERT_CONSTEXPR
# define ENTT_ASSERT_CONSTEXPR(condition, msg) (void(0))
#elif !defined ENTT_ASSERT_CONSTEXPR
# define ENTT_ASSERT_CONSTEXPR(condition, msg) ENTT_ASSERT(condition, msg)
#endif
#ifdef ENTT_NO_ETO
# define ENTT_ETO_TYPE(Type) void
#else
# define ENTT_ETO_TYPE(Type) Type
#endif
#ifdef ENTT_STANDARD_CPP
# define ENTT_NONSTD false
#else
# define ENTT_NONSTD true
# if defined __clang__ || defined __GNUC__
# define ENTT_PRETTY_FUNCTION __PRETTY_FUNCTION__
# define ENTT_PRETTY_FUNCTION_PREFIX '='
# define ENTT_PRETTY_FUNCTION_SUFFIX ']'
# elif defined _MSC_VER
# define ENTT_PRETTY_FUNCTION __FUNCSIG__
# define ENTT_PRETTY_FUNCTION_PREFIX '<'
# define ENTT_PRETTY_FUNCTION_SUFFIX '>'
# endif
#endif
#if defined _MSC_VER
# pragma detect_mismatch("entt.version", ENTT_VERSION)
# pragma detect_mismatch("entt.noexcept", ENTT_XSTR(ENTT_TRY))
# pragma detect_mismatch("entt.id", ENTT_XSTR(ENTT_ID_TYPE))
# pragma detect_mismatch("entt.nonstd", ENTT_XSTR(ENTT_NONSTD))
#endif
#endif
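These switches are meant to be defined by the consumer before any EnTT header is included, typically through compiler definitions. A hedged sketch with arbitrary example values rather than project settings:

    // Override configuration macros before pulling in EnTT.
    #include <cstdint>
    #define ENTT_ID_TYPE std::uint64_t            // default is std::uint32_t
    #define ENTT_ASSERT(condition, msg) ((void)0) // or define ENTT_DISABLE_ASSERT instead
    #include <entt/entt.hpp>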

@@ -0,0 +1,7 @@
#ifndef ENTT_CONFIG_MACRO_H
#define ENTT_CONFIG_MACRO_H
#define ENTT_STR(arg) #arg
#define ENTT_XSTR(arg) ENTT_STR(arg)
#endif

@@ -0,0 +1,14 @@
#ifndef ENTT_CONFIG_VERSION_H
#define ENTT_CONFIG_VERSION_H
#include "macro.h"
#define ENTT_VERSION_MAJOR 3
#define ENTT_VERSION_MINOR 11
#define ENTT_VERSION_PATCH 1
#define ENTT_VERSION \
ENTT_XSTR(ENTT_VERSION_MAJOR) \
"." ENTT_XSTR(ENTT_VERSION_MINOR) "." ENTT_XSTR(ENTT_VERSION_PATCH)
#endif
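Through the ENTT_STR/ENTT_XSTR helpers from macro.h, each numeric component expands to a string literal, so ENTT_VERSION concatenates to "3" "." "11" "." "1", i.e. "3.11.1". A tiny sketch, assuming src/ is on the include path:

    #include <entt/config/version.h>
    #include <cstdio>

    int main() {
        static_assert(ENTT_VERSION_MAJOR == 3 && ENTT_VERSION_MINOR == 11 && ENTT_VERSION_PATCH == 1);
        std::puts(ENTT_VERSION); // prints 3.11.1
    }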

File diff suppressed because it is too large.

@@ -0,0 +1,895 @@
#ifndef ENTT_CONTAINER_DENSE_SET_HPP
#define ENTT_CONTAINER_DENSE_SET_HPP
#include <cmath>
#include <cstddef>
#include <functional>
#include <iterator>
#include <limits>
#include <memory>
#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>
#include "../config/config.h"
#include "../core/compressed_pair.hpp"
#include "../core/memory.hpp"
#include "../core/type_traits.hpp"
#include "fwd.hpp"
namespace entt {
/**
* @cond TURN_OFF_DOXYGEN
* Internal details not to be documented.
*/
namespace internal {
template<typename It>
class dense_set_iterator final {
template<typename>
friend class dense_set_iterator;
public:
using value_type = typename It::value_type::second_type;
using pointer = const value_type *;
using reference = const value_type &;
using difference_type = std::ptrdiff_t;
using iterator_category = std::random_access_iterator_tag;
constexpr dense_set_iterator() noexcept
: it{} {}
constexpr dense_set_iterator(const It iter) noexcept
: it{iter} {}
template<typename Other, typename = std::enable_if_t<!std::is_same_v<It, Other> && std::is_constructible_v<It, Other>>>
constexpr dense_set_iterator(const dense_set_iterator<Other> &other) noexcept
: it{other.it} {}
constexpr dense_set_iterator &operator++() noexcept {
return ++it, *this;
}
constexpr dense_set_iterator operator++(int) noexcept {
dense_set_iterator orig = *this;
return ++(*this), orig;
}
constexpr dense_set_iterator &operator--() noexcept {
return --it, *this;
}
constexpr dense_set_iterator operator--(int) noexcept {
dense_set_iterator orig = *this;
return operator--(), orig;
}
constexpr dense_set_iterator &operator+=(const difference_type value) noexcept {
it += value;
return *this;
}
constexpr dense_set_iterator operator+(const difference_type value) const noexcept {
dense_set_iterator copy = *this;
return (copy += value);
}
constexpr dense_set_iterator &operator-=(const difference_type value) noexcept {
return (*this += -value);
}
constexpr dense_set_iterator operator-(const difference_type value) const noexcept {
return (*this + -value);
}
[[nodiscard]] constexpr reference operator[](const difference_type value) const noexcept {
return it[value].second;
}
[[nodiscard]] constexpr pointer operator->() const noexcept {
return std::addressof(it->second);
}
[[nodiscard]] constexpr reference operator*() const noexcept {
return *operator->();
}
template<typename ILhs, typename IRhs>
friend constexpr std::ptrdiff_t operator-(const dense_set_iterator<ILhs> &, const dense_set_iterator<IRhs> &) noexcept;
template<typename ILhs, typename IRhs>
friend constexpr bool operator==(const dense_set_iterator<ILhs> &, const dense_set_iterator<IRhs> &) noexcept;
template<typename ILhs, typename IRhs>
friend constexpr bool operator<(const dense_set_iterator<ILhs> &, const dense_set_iterator<IRhs> &) noexcept;
private:
It it;
};
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr std::ptrdiff_t operator-(const dense_set_iterator<ILhs> &lhs, const dense_set_iterator<IRhs> &rhs) noexcept {
return lhs.it - rhs.it;
}
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr bool operator==(const dense_set_iterator<ILhs> &lhs, const dense_set_iterator<IRhs> &rhs) noexcept {
return lhs.it == rhs.it;
}
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr bool operator!=(const dense_set_iterator<ILhs> &lhs, const dense_set_iterator<IRhs> &rhs) noexcept {
return !(lhs == rhs);
}
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr bool operator<(const dense_set_iterator<ILhs> &lhs, const dense_set_iterator<IRhs> &rhs) noexcept {
return lhs.it < rhs.it;
}
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr bool operator>(const dense_set_iterator<ILhs> &lhs, const dense_set_iterator<IRhs> &rhs) noexcept {
return rhs < lhs;
}
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr bool operator<=(const dense_set_iterator<ILhs> &lhs, const dense_set_iterator<IRhs> &rhs) noexcept {
return !(lhs > rhs);
}
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr bool operator>=(const dense_set_iterator<ILhs> &lhs, const dense_set_iterator<IRhs> &rhs) noexcept {
return !(lhs < rhs);
}
template<typename It>
class dense_set_local_iterator final {
template<typename>
friend class dense_set_local_iterator;
public:
using value_type = typename It::value_type::second_type;
using pointer = const value_type *;
using reference = const value_type &;
using difference_type = std::ptrdiff_t;
using iterator_category = std::forward_iterator_tag;
constexpr dense_set_local_iterator() noexcept
: it{},
offset{} {}
constexpr dense_set_local_iterator(It iter, const std::size_t pos) noexcept
: it{iter},
offset{pos} {}
template<typename Other, typename = std::enable_if_t<!std::is_same_v<It, Other> && std::is_constructible_v<It, Other>>>
constexpr dense_set_local_iterator(const dense_set_local_iterator<Other> &other) noexcept
: it{other.it},
offset{other.offset} {}
constexpr dense_set_local_iterator &operator++() noexcept {
return offset = it[offset].first, *this;
}
constexpr dense_set_local_iterator operator++(int) noexcept {
dense_set_local_iterator orig = *this;
return ++(*this), orig;
}
[[nodiscard]] constexpr pointer operator->() const noexcept {
return std::addressof(it[offset].second);
}
[[nodiscard]] constexpr reference operator*() const noexcept {
return *operator->();
}
[[nodiscard]] constexpr std::size_t index() const noexcept {
return offset;
}
private:
It it;
std::size_t offset;
};
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr bool operator==(const dense_set_local_iterator<ILhs> &lhs, const dense_set_local_iterator<IRhs> &rhs) noexcept {
return lhs.index() == rhs.index();
}
template<typename ILhs, typename IRhs>
[[nodiscard]] constexpr bool operator!=(const dense_set_local_iterator<ILhs> &lhs, const dense_set_local_iterator<IRhs> &rhs) noexcept {
return !(lhs == rhs);
}
} // namespace internal
/**
* Internal details not to be documented.
* @endcond
*/
/**
* @brief Associative container for unique objects of a given type.
*
* Internally, elements are organized into buckets. Which bucket an element is
* placed into depends entirely on its hash. Elements with the same hash code
* appear in the same bucket.
*
* @tparam Type Value type of the associative container.
* @tparam Hash Type of function to use to hash the values.
* @tparam KeyEqual Type of function to use to compare the values for equality.
* @tparam Allocator Type of allocator used to manage memory and elements.
*/
template<typename Type, typename Hash, typename KeyEqual, typename Allocator>
class dense_set {
static constexpr float default_threshold = 0.875f;
static constexpr std::size_t minimum_capacity = 8u;
using node_type = std::pair<std::size_t, Type>;
using alloc_traits = std::allocator_traits<Allocator>;
static_assert(std::is_same_v<typename alloc_traits::value_type, Type>, "Invalid value type");
using sparse_container_type = std::vector<std::size_t, typename alloc_traits::template rebind_alloc<std::size_t>>;
using packed_container_type = std::vector<node_type, typename alloc_traits::template rebind_alloc<node_type>>;
template<typename Other>
[[nodiscard]] std::size_t value_to_bucket(const Other &value) const noexcept {
return fast_mod(static_cast<size_type>(sparse.second()(value)), bucket_count());
}
template<typename Other>
[[nodiscard]] auto constrained_find(const Other &value, std::size_t bucket) {
for(auto it = begin(bucket), last = end(bucket); it != last; ++it) {
if(packed.second()(*it, value)) {
return begin() + static_cast<typename iterator::difference_type>(it.index());
}
}
return end();
}
template<typename Other>
[[nodiscard]] auto constrained_find(const Other &value, std::size_t bucket) const {
for(auto it = cbegin(bucket), last = cend(bucket); it != last; ++it) {
if(packed.second()(*it, value)) {
return cbegin() + static_cast<typename iterator::difference_type>(it.index());
}
}
return cend();
}
template<typename Other>
[[nodiscard]] auto insert_or_do_nothing(Other &&value) {
const auto index = value_to_bucket(value);
if(auto it = constrained_find(value, index); it != end()) {
return std::make_pair(it, false);
}
packed.first().emplace_back(sparse.first()[index], std::forward<Other>(value));
sparse.first()[index] = packed.first().size() - 1u;
rehash_if_required();
return std::make_pair(--end(), true);
}
void move_and_pop(const std::size_t pos) {
if(const auto last = size() - 1u; pos != last) {
size_type *curr = sparse.first().data() + value_to_bucket(packed.first().back().second);
packed.first()[pos] = std::move(packed.first().back());
for(; *curr != last; curr = &packed.first()[*curr].first) {}
*curr = pos;
}
packed.first().pop_back();
}
void rehash_if_required() {
if(size() > (bucket_count() * max_load_factor())) {
rehash(bucket_count() * 2u);
}
}
public:
/*! @brief Key type of the container. */
using key_type = Type;
/*! @brief Value type of the container. */
using value_type = Type;
/*! @brief Unsigned integer type. */
using size_type = std::size_t;
/*! @brief Type of function to use to hash the elements. */
using hasher = Hash;
/*! @brief Type of function to use to compare the elements for equality. */
using key_equal = KeyEqual;
/*! @brief Allocator type. */
using allocator_type = Allocator;
/*! @brief Random access iterator type. */
using iterator = internal::dense_set_iterator<typename packed_container_type::iterator>;
/*! @brief Constant random access iterator type. */
using const_iterator = internal::dense_set_iterator<typename packed_container_type::const_iterator>;
/*! @brief Forward iterator type. */
using local_iterator = internal::dense_set_local_iterator<typename packed_container_type::iterator>;
/*! @brief Constant forward iterator type. */
using const_local_iterator = internal::dense_set_local_iterator<typename packed_container_type::const_iterator>;
/*! @brief Default constructor. */
dense_set()
: dense_set{minimum_capacity} {}
/**
* @brief Constructs an empty container with a given allocator.
* @param allocator The allocator to use.
*/
explicit dense_set(const allocator_type &allocator)
: dense_set{minimum_capacity, hasher{}, key_equal{}, allocator} {}
/**
* @brief Constructs an empty container with a given allocator and user
* supplied minimal number of buckets.
* @param cnt Minimal number of buckets.
* @param allocator The allocator to use.
*/
dense_set(const size_type cnt, const allocator_type &allocator)
: dense_set{cnt, hasher{}, key_equal{}, allocator} {}
/**
* @brief Constructs an empty container with a given allocator, hash
* function and user supplied minimal number of buckets.
* @param cnt Minimal number of buckets.
* @param hash Hash function to use.
* @param allocator The allocator to use.
*/
dense_set(const size_type cnt, const hasher &hash, const allocator_type &allocator)
: dense_set{cnt, hash, key_equal{}, allocator} {}
/**
* @brief Constructs an empty container with a given allocator, hash
* function, compare function and user supplied minimal number of buckets.
* @param cnt Minimal number of buckets.
* @param hash Hash function to use.
* @param equal Compare function to use.
* @param allocator The allocator to use.
*/
explicit dense_set(const size_type cnt, const hasher &hash = hasher{}, const key_equal &equal = key_equal{}, const allocator_type &allocator = allocator_type{})
: sparse{allocator, hash},
packed{allocator, equal},
threshold{default_threshold} {
rehash(cnt);
}
/*! @brief Default copy constructor. */
dense_set(const dense_set &) = default;
/**
* @brief Allocator-extended copy constructor.
* @param other The instance to copy from.
* @param allocator The allocator to use.
*/
dense_set(const dense_set &other, const allocator_type &allocator)
: sparse{std::piecewise_construct, std::forward_as_tuple(other.sparse.first(), allocator), std::forward_as_tuple(other.sparse.second())},
packed{std::piecewise_construct, std::forward_as_tuple(other.packed.first(), allocator), std::forward_as_tuple(other.packed.second())},
threshold{other.threshold} {}
/*! @brief Default move constructor. */
dense_set(dense_set &&) noexcept(std::is_nothrow_move_constructible_v<compressed_pair<sparse_container_type, hasher>> &&std::is_nothrow_move_constructible_v<compressed_pair<packed_container_type, key_equal>>) = default;
/**
* @brief Allocator-extended move constructor.
* @param other The instance to move from.
* @param allocator The allocator to use.
*/
dense_set(dense_set &&other, const allocator_type &allocator)
: sparse{std::piecewise_construct, std::forward_as_tuple(std::move(other.sparse.first()), allocator), std::forward_as_tuple(std::move(other.sparse.second()))},
packed{std::piecewise_construct, std::forward_as_tuple(std::move(other.packed.first()), allocator), std::forward_as_tuple(std::move(other.packed.second()))},
threshold{other.threshold} {}
/**
* @brief Default copy assignment operator.
* @return This container.
*/
dense_set &operator=(const dense_set &) = default;
/**
* @brief Default move assignment operator.
* @return This container.
*/
dense_set &operator=(dense_set &&) noexcept(std::is_nothrow_move_assignable_v<compressed_pair<sparse_container_type, hasher>> &&std::is_nothrow_move_assignable_v<compressed_pair<packed_container_type, key_equal>>) = default;
/**
* @brief Returns the associated allocator.
* @return The associated allocator.
*/
[[nodiscard]] constexpr allocator_type get_allocator() const noexcept {
return sparse.first().get_allocator();
}
/**
* @brief Returns an iterator to the beginning.
*
* The returned iterator points to the first instance of the internal array.
* If the array is empty, the returned iterator will be equal to `end()`.
*
* @return An iterator to the first instance of the internal array.
*/
[[nodiscard]] const_iterator cbegin() const noexcept {
return packed.first().begin();
}
/*! @copydoc cbegin */
[[nodiscard]] const_iterator begin() const noexcept {
return cbegin();
}
/*! @copydoc begin */
[[nodiscard]] iterator begin() noexcept {
return packed.first().begin();
}
/**
* @brief Returns an iterator to the end.
*
* The returned iterator points to the element following the last instance
* of the internal array. Attempting to dereference the returned iterator
* results in undefined behavior.
*
* @return An iterator to the element following the last instance of the
* internal array.
*/
[[nodiscard]] const_iterator cend() const noexcept {
return packed.first().end();
}
/*! @copydoc cend */
[[nodiscard]] const_iterator end() const noexcept {
return cend();
}
/*! @copydoc end */
[[nodiscard]] iterator end() noexcept {
return packed.first().end();
}
/**
* @brief Checks whether a container is empty.
* @return True if the container is empty, false otherwise.
*/
[[nodiscard]] bool empty() const noexcept {
return packed.first().empty();
}
/**
* @brief Returns the number of elements in a container.
* @return Number of elements in a container.
*/
[[nodiscard]] size_type size() const noexcept {
return packed.first().size();
}
/**
* @brief Returns the maximum possible number of elements.
* @return Maximum possible number of elements.
*/
[[nodiscard]] size_type max_size() const noexcept {
return packed.first().max_size();
}
/*! @brief Clears the container. */
void clear() noexcept {
sparse.first().clear();
packed.first().clear();
rehash(0u);
}
/**
* @brief Inserts an element into the container, if it does not exist.
* @param value An element to insert into the container.
* @return A pair consisting of an iterator to the inserted element (or to
* the element that prevented the insertion) and a bool denoting whether the
* insertion took place.
*/
std::pair<iterator, bool> insert(const value_type &value) {
return insert_or_do_nothing(value);
}
/*! @copydoc insert */
std::pair<iterator, bool> insert(value_type &&value) {
return insert_or_do_nothing(std::move(value));
}
/**
* @brief Inserts elements into the container, if they do not exist.
* @tparam It Type of input iterator.
* @param first An iterator to the first element of the range of elements.
* @param last An iterator past the last element of the range of elements.
*/
template<typename It>
void insert(It first, It last) {
for(; first != last; ++first) {
insert(*first);
}
}
/**
* @brief Constructs an element in-place, if it does not exist.
*
* The element is also constructed when the container already has the key,
* in which case the newly constructed object is destroyed immediately.
*
* @tparam Args Types of arguments to forward to the constructor of the
* element.
* @param args Arguments to forward to the constructor of the element.
* @return A pair consisting of an iterator to the inserted element (or to
* the element that prevented the insertion) and a bool denoting whether the
* insertion took place.
*/
template<typename... Args>
std::pair<iterator, bool> emplace(Args &&...args) {
if constexpr(((sizeof...(Args) == 1u) && ... && std::is_same_v<std::decay_t<Args>, value_type>)) {
return insert_or_do_nothing(std::forward<Args>(args)...);
} else {
auto &node = packed.first().emplace_back(std::piecewise_construct, std::make_tuple(packed.first().size()), std::forward_as_tuple(std::forward<Args>(args)...));
const auto index = value_to_bucket(node.second);
if(auto it = constrained_find(node.second, index); it != end()) {
packed.first().pop_back();
return std::make_pair(it, false);
}
std::swap(node.first, sparse.first()[index]);
rehash_if_required();
return std::make_pair(--end(), true);
}
}
/**
* @brief Removes an element from a given position.
* @param pos An iterator to the element to remove.
* @return An iterator following the removed element.
*/
iterator erase(const_iterator pos) {
const auto diff = pos - cbegin();
erase(*pos);
return begin() + diff;
}
/**
* @brief Removes the given elements from a container.
* @param first An iterator to the first element of the range of elements.
* @param last An iterator past the last element of the range of elements.
* @return An iterator following the last removed element.
*/
iterator erase(const_iterator first, const_iterator last) {
const auto dist = first - cbegin();
for(auto from = last - cbegin(); from != dist; --from) {
erase(packed.first()[from - 1u].second);
}
return (begin() + dist);
}
/**
* @brief Removes the element associated with a given value.
* @param value Value of an element to remove.
* @return Number of elements removed (either 0 or 1).
*/
size_type erase(const value_type &value) {
for(size_type *curr = sparse.first().data() + value_to_bucket(value); *curr != (std::numeric_limits<size_type>::max)(); curr = &packed.first()[*curr].first) {
if(packed.second()(packed.first()[*curr].second, value)) {
const auto index = *curr;
*curr = packed.first()[*curr].first;
move_and_pop(index);
return 1u;
}
}
return 0u;
}
/**
* @brief Exchanges the contents with those of a given container.
* @param other Container to exchange the content with.
*/
void swap(dense_set &other) {
using std::swap;
swap(sparse, other.sparse);
swap(packed, other.packed);
swap(threshold, other.threshold);
}
/**
* @brief Returns the number of elements matching a value (either 1 or 0).
* @param key Key value of an element to search for.
* @return Number of elements matching the key (either 1 or 0).
*/
[[nodiscard]] size_type count(const value_type &key) const {
return find(key) != end();
}
/**
* @brief Returns the number of elements matching a key (either 1 or 0).
* @tparam Other Type of the key value of an element to search for.
* @param key Key value of an element to search for.
* @return Number of elements matching the key (either 1 or 0).
*/
template<typename Other>
[[nodiscard]] std::enable_if_t<is_transparent_v<hasher> && is_transparent_v<key_equal>, std::conditional_t<false, Other, size_type>>
count(const Other &key) const {
return find(key) != end();
}
/**
* @brief Finds an element with a given value.
* @param value Value of an element to search for.
* @return An iterator to an element with the given value. If no such
* element is found, a past-the-end iterator is returned.
*/
[[nodiscard]] iterator find(const value_type &value) {
return constrained_find(value, value_to_bucket(value));
}
/*! @copydoc find */
[[nodiscard]] const_iterator find(const value_type &value) const {
return constrained_find(value, value_to_bucket(value));
}
/**
* @brief Finds an element that compares _equivalent_ to a given value.
* @tparam Other Type of an element to search for.
* @param value Value of an element to search for.
* @return An iterator to an element with the given value. If no such
* element is found, a past-the-end iterator is returned.
*/
template<typename Other>
[[nodiscard]] std::enable_if_t<is_transparent_v<hasher> && is_transparent_v<key_equal>, std::conditional_t<false, Other, iterator>>
find(const Other &value) {
return constrained_find(value, value_to_bucket(value));
}
/*! @copydoc find */
template<typename Other>
[[nodiscard]] std::enable_if_t<is_transparent_v<hasher> && is_transparent_v<key_equal>, std::conditional_t<false, Other, const_iterator>>
find(const Other &value) const {
return constrained_find(value, value_to_bucket(value));
}
/**
* @brief Returns a range containing all elements with a given value.
* @param value Value of an element to search for.
* @return A pair of iterators pointing to the first element and past the
* last element of the range.
*/
[[nodiscard]] std::pair<iterator, iterator> equal_range(const value_type &value) {
const auto it = find(value);
return {it, it + !(it == end())};
}
/*! @copydoc equal_range */
[[nodiscard]] std::pair<const_iterator, const_iterator> equal_range(const value_type &value) const {
const auto it = find(value);
return {it, it + !(it == cend())};
}
/**
* @brief Returns a range containing all elements that compare _equivalent_
* to a given value.
* @tparam Other Type of an element to search for.
* @param value Value of an element to search for.
* @return A pair of iterators pointing to the first element and past the
* last element of the range.
*/
template<typename Other>
[[nodiscard]] std::enable_if_t<is_transparent_v<hasher> && is_transparent_v<key_equal>, std::conditional_t<false, Other, std::pair<iterator, iterator>>>
equal_range(const Other &value) {
const auto it = find(value);
return {it, it + !(it == end())};
}
/*! @copydoc equal_range */
template<class Other>
[[nodiscard]] std::enable_if_t<is_transparent_v<hasher> && is_transparent_v<key_equal>, std::conditional_t<false, Other, std::pair<const_iterator, const_iterator>>>
equal_range(const Other &value) const {
const auto it = find(value);
return {it, it + !(it == cend())};
}
/**
* @brief Checks if the container contains an element with a given value.
* @param value Value of an element to search for.
* @return True if there is such an element, false otherwise.
*/
[[nodiscard]] bool contains(const value_type &value) const {
return (find(value) != cend());
}
/**
* @brief Checks if the container contains an element that compares
* _equivalent_ to a given value.
* @tparam Other Type of an element to search for.
* @param value Value of an element to search for.
* @return True if there is such an element, false otherwise.
*/
template<typename Other>
[[nodiscard]] std::enable_if_t<is_transparent_v<hasher> && is_transparent_v<key_equal>, std::conditional_t<false, Other, bool>>
contains(const Other &value) const {
return (find(value) != cend());
}
/**
* @brief Returns an iterator to the beginning of a given bucket.
* @param index An index of a bucket to access.
* @return An iterator to the beginning of the given bucket.
*/
[[nodiscard]] const_local_iterator cbegin(const size_type index) const {
return {packed.first().begin(), sparse.first()[index]};
}
/**
* @brief Returns an iterator to the beginning of a given bucket.
* @param index An index of a bucket to access.
* @return An iterator to the beginning of the given bucket.
*/
[[nodiscard]] const_local_iterator begin(const size_type index) const {
return cbegin(index);
}
/**
* @brief Returns an iterator to the beginning of a given bucket.
* @param index An index of a bucket to access.
* @return An iterator to the beginning of the given bucket.
*/
[[nodiscard]] local_iterator begin(const size_type index) {
return {packed.first().begin(), sparse.first()[index]};
}
/**
* @brief Returns an iterator to the end of a given bucket.
* @param index An index of a bucket to access.
* @return An iterator to the end of the given bucket.
*/
[[nodiscard]] const_local_iterator cend([[maybe_unused]] const size_type index) const {
return {packed.first().begin(), (std::numeric_limits<size_type>::max)()};
}
/**
* @brief Returns an iterator to the end of a given bucket.
* @param index An index of a bucket to access.
* @return An iterator to the end of the given bucket.
*/
[[nodiscard]] const_local_iterator end(const size_type index) const {
return cend(index);
}
/**
* @brief Returns an iterator to the end of a given bucket.
* @param index An index of a bucket to access.
* @return An iterator to the end of the given bucket.
*/
[[nodiscard]] local_iterator end([[maybe_unused]] const size_type index) {
return {packed.first().begin(), (std::numeric_limits<size_type>::max)()};
}
/**
* @brief Returns the number of buckets.
* @return The number of buckets.
*/
[[nodiscard]] size_type bucket_count() const {
return sparse.first().size();
}
/**
* @brief Returns the maximum number of buckets.
* @return The maximum number of buckets.
*/
[[nodiscard]] size_type max_bucket_count() const {
return sparse.first().max_size();
}
/**
* @brief Returns the number of elements in a given bucket.
* @param index The index of the bucket to examine.
* @return The number of elements in the given bucket.
*/
[[nodiscard]] size_type bucket_size(const size_type index) const {
return static_cast<size_type>(std::distance(begin(index), end(index)));
}
/**
* @brief Returns the bucket for a given element.
* @param value The value of the element to examine.
* @return The bucket for the given element.
*/
[[nodiscard]] size_type bucket(const value_type &value) const {
return value_to_bucket(value);
}
/**
* @brief Returns the average number of elements per bucket.
* @return The average number of elements per bucket.
*/
[[nodiscard]] float load_factor() const {
return size() / static_cast<float>(bucket_count());
}
/**
* @brief Returns the maximum average number of elements per bucket.
* @return The maximum average number of elements per bucket.
*/
[[nodiscard]] float max_load_factor() const {
return threshold;
}
/**
* @brief Sets the desired maximum average number of elements per bucket.
* @param value A desired maximum average number of elements per bucket.
*/
void max_load_factor(const float value) {
ENTT_ASSERT(value > 0.f, "Invalid load factor");
threshold = value;
rehash(0u);
}
/**
* @brief Reserves at least the specified number of buckets and regenerates
* the hash table.
* @param cnt New number of buckets.
*/
void rehash(const size_type cnt) {
auto value = cnt > minimum_capacity ? cnt : minimum_capacity;
const auto cap = static_cast<size_type>(size() / max_load_factor());
value = value > cap ? value : cap;
if(const auto sz = next_power_of_two(value); sz != bucket_count()) {
sparse.first().resize(sz);
for(auto &&elem: sparse.first()) {
elem = std::numeric_limits<size_type>::max();
}
for(size_type pos{}, last = size(); pos < last; ++pos) {
const auto index = value_to_bucket(packed.first()[pos].second);
packed.first()[pos].first = std::exchange(sparse.first()[index], pos);
}
}
}
/**
* @brief Reserves space for at least the specified number of elements and
* regenerates the hash table.
* @param cnt New number of elements.
*/
void reserve(const size_type cnt) {
packed.first().reserve(cnt);
rehash(static_cast<size_type>(std::ceil(cnt / max_load_factor())));
}
/**
* @brief Returns the function used to hash the elements.
* @return The function used to hash the elements.
*/
[[nodiscard]] hasher hash_function() const {
return sparse.second();
}
/**
* @brief Returns the function used to compare elements for equality.
* @return The function used to compare elements for equality.
*/
[[nodiscard]] key_equal key_eq() const {
return packed.second();
}
private:
compressed_pair<sparse_container_type, hasher> sparse;
compressed_pair<packed_container_type, key_equal> packed;
float threshold;
};
} // namespace entt
#endif
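A short usage sketch of the container above, assuming the single-include <entt/entt.hpp> header; it only exercises operations documented in the comments and mirrors std::unordered_set:

    #include <entt/entt.hpp>
    #include <cassert>
    #include <string>

    int main() {
        entt::dense_set<std::string> names;

        // insert/emplace return {iterator, inserted}, as with std::unordered_set.
        auto [it, inserted] = names.insert(std::string{"alice"});
        assert(inserted && *it == "alice");
        names.emplace("bob");
        names.insert(std::string{"alice"}); // duplicate, insertion refused

        assert(names.size() == 2u);
        assert(names.contains("bob"));
        assert(names.erase("alice") == 1u);
        assert(!names.contains("alice"));
    }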

@@ -0,0 +1,26 @@
#ifndef ENTT_CONTAINER_FWD_HPP
#define ENTT_CONTAINER_FWD_HPP
#include <functional>
#include <memory>
namespace entt {
template<
typename Key,
typename Type,
typename = std::hash<Key>,
typename = std::equal_to<Key>,
typename = std::allocator<std::pair<const Key, Type>>>
class dense_map;
template<
typename Type,
typename = std::hash<Type>,
typename = std::equal_to<Type>,
typename = std::allocator<Type>>
class dense_set;
} // namespace entt
#endif

@@ -0,0 +1,137 @@
#ifndef ENTT_CORE_ALGORITHM_HPP
#define ENTT_CORE_ALGORITHM_HPP
#include <algorithm>
#include <functional>
#include <iterator>
#include <utility>
#include <vector>
#include "utility.hpp"
namespace entt {
/**
* @brief Function object to wrap `std::sort` in a class type.
*
* Unfortunately, `std::sort` cannot be passed as template argument to a class
* template or a function template.<br/>
* This class fills the gap by wrapping some flavors of `std::sort` in a
* function object.
*/
struct std_sort {
/**
* @brief Sorts the elements in a range.
*
* Sorts the elements in a range using the given binary comparison function.
*
* @tparam It Type of random access iterator.
* @tparam Compare Type of comparison function object.
* @tparam Args Types of arguments to forward to the sort function.
* @param first An iterator to the first element of the range to sort.
* @param last An iterator past the last element of the range to sort.
* @param compare A valid comparison function object.
* @param args Arguments to forward to the sort function, if any.
*/
template<typename It, typename Compare = std::less<>, typename... Args>
void operator()(It first, It last, Compare compare = Compare{}, Args &&...args) const {
std::sort(std::forward<Args>(args)..., std::move(first), std::move(last), std::move(compare));
}
};
/*! @brief Function object for performing insertion sort. */
struct insertion_sort {
/**
* @brief Sorts the elements in a range.
*
* Sorts the elements in a range using the given binary comparison function.
*
* @tparam It Type of random access iterator.
* @tparam Compare Type of comparison function object.
* @param first An iterator to the first element of the range to sort.
* @param last An iterator past the last element of the range to sort.
* @param compare A valid comparison function object.
*/
template<typename It, typename Compare = std::less<>>
void operator()(It first, It last, Compare compare = Compare{}) const {
if(first < last) {
for(auto it = first + 1; it < last; ++it) {
auto value = std::move(*it);
auto pre = it;
for(; pre > first && compare(value, *(pre - 1)); --pre) {
*pre = std::move(*(pre - 1));
}
*pre = std::move(value);
}
}
}
};
/**
* @brief Function object for performing LSD radix sort.
* @tparam Bit Number of bits processed per pass.
* @tparam N Maximum number of bits to sort.
*/
template<std::size_t Bit, std::size_t N>
struct radix_sort {
static_assert((N % Bit) == 0, "The maximum number of bits to sort must be a multiple of the number of bits processed per pass");
/**
* @brief Sorts the elements in a range.
*
* Sorts the elements in a range using the given _getter_ to access the
* actual data to be sorted.
*
* This implementation is inspired by the online book
* [Physically Based Rendering](http://www.pbr-book.org/3ed-2018/Primitives_and_Intersection_Acceleration/Bounding_Volume_Hierarchies.html#RadixSort).
*
* @tparam It Type of random access iterator.
* @tparam Getter Type of _getter_ function object.
* @param first An iterator to the first element of the range to sort.
* @param last An iterator past the last element of the range to sort.
* @param getter A valid _getter_ function object.
*/
template<typename It, typename Getter = identity>
void operator()(It first, It last, Getter getter = Getter{}) const {
if(first < last) {
static constexpr auto mask = (1 << Bit) - 1;
static constexpr auto buckets = 1 << Bit;
static constexpr auto passes = N / Bit;
using value_type = typename std::iterator_traits<It>::value_type;
std::vector<value_type> aux(std::distance(first, last));
auto part = [getter = std::move(getter)](auto from, auto to, auto out, auto start) {
std::size_t index[buckets]{};
std::size_t count[buckets]{};
for(auto it = from; it != to; ++it) {
++count[(getter(*it) >> start) & mask];
}
for(std::size_t pos{}, end = buckets - 1u; pos < end; ++pos) {
index[pos + 1u] = index[pos] + count[pos];
}
for(auto it = from; it != to; ++it) {
out[index[(getter(*it) >> start) & mask]++] = std::move(*it);
}
};
for(std::size_t pass = 0; pass < (passes & ~1); pass += 2) {
part(first, last, aux.begin(), pass * Bit);
part(aux.begin(), aux.end(), first, (pass + 1) * Bit);
}
if constexpr(passes & 1) {
part(first, last, aux.begin(), (passes - 1) * Bit);
std::move(aux.begin(), aux.end(), first);
}
}
}
};
} // namespace entt
#endif
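A short sketch of the function objects above, again assuming <entt/entt.hpp>. radix_sort uses the default entt::identity getter here, so the keys are plain unsigned integers:

    #include <entt/entt.hpp>
    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main() {
        std::vector<std::uint32_t> values{42u, 3u, 99u, 7u};

        entt::insertion_sort{}(values.begin(), values.end()); // std::less<> by default
        assert(values.front() == 3u && values.back() == 99u);

        // 8 bits per pass over 32-bit keys: four passes, no trailing odd pass.
        entt::radix_sort<8u, 32u>{}(values.begin(), values.end());
        assert(values[1] == 7u);
    }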

external/entt/entt/src/entt/core/any.hpp vendored Normal file (510 lines)
@@ -0,0 +1,510 @@
#ifndef ENTT_CORE_ANY_HPP
#define ENTT_CORE_ANY_HPP
#include <cstddef>
#include <memory>
#include <type_traits>
#include <utility>
#include "../config/config.h"
#include "../core/utility.hpp"
#include "fwd.hpp"
#include "type_info.hpp"
#include "type_traits.hpp"
namespace entt {
/**
* @cond TURN_OFF_DOXYGEN
* Internal details not to be documented.
*/
namespace internal {
enum class any_operation : std::uint8_t {
copy,
move,
transfer,
assign,
destroy,
compare,
get
};
enum class any_policy : std::uint8_t {
owner,
ref,
cref
};
} // namespace internal
/**
* Internal details not to be documented.
* @endcond
*/
/**
* @brief An SBO-friendly, type-safe container for single values of any type.
* @tparam Len Size of the storage reserved for the small buffer optimization.
* @tparam Align Optional alignment requirement.
*/
template<std::size_t Len, std::size_t Align>
class basic_any {
using operation = internal::any_operation;
using policy = internal::any_policy;
using vtable_type = const void *(const operation, const basic_any &, const void *);
struct storage_type {
alignas(Align) std::byte data[Len + !Len];
};
template<typename Type>
static constexpr bool in_situ = Len && alignof(Type) <= Align && sizeof(Type) <= Len &&std::is_nothrow_move_constructible_v<Type>;
template<typename Type>
static const void *basic_vtable(const operation op, const basic_any &value, const void *other) {
static_assert(!std::is_same_v<Type, void> && std::is_same_v<std::remove_cv_t<std::remove_reference_t<Type>>, Type>, "Invalid type");
const Type *element = nullptr;
if constexpr(in_situ<Type>) {
element = value.owner() ? reinterpret_cast<const Type *>(&value.storage) : static_cast<const Type *>(value.instance);
} else {
element = static_cast<const Type *>(value.instance);
}
switch(op) {
case operation::copy:
if constexpr(std::is_copy_constructible_v<Type>) {
static_cast<basic_any *>(const_cast<void *>(other))->initialize<Type>(*element);
}
break;
case operation::move:
if constexpr(in_situ<Type>) {
if(value.owner()) {
return new(&static_cast<basic_any *>(const_cast<void *>(other))->storage) Type{std::move(*const_cast<Type *>(element))};
}
}
return (static_cast<basic_any *>(const_cast<void *>(other))->instance = std::exchange(const_cast<basic_any &>(value).instance, nullptr));
case operation::transfer:
if constexpr(std::is_move_assignable_v<Type>) {
*const_cast<Type *>(element) = std::move(*static_cast<Type *>(const_cast<void *>(other)));
return other;
}
[[fallthrough]];
case operation::assign:
if constexpr(std::is_copy_assignable_v<Type>) {
*const_cast<Type *>(element) = *static_cast<const Type *>(other);
return other;
}
break;
case operation::destroy:
if constexpr(in_situ<Type>) {
element->~Type();
} else if constexpr(std::is_array_v<Type>) {
delete[] element;
} else {
delete element;
}
break;
case operation::compare:
if constexpr(!std::is_function_v<Type> && !std::is_array_v<Type> && is_equality_comparable_v<Type>) {
return *element == *static_cast<const Type *>(other) ? other : nullptr;
} else {
return (element == other) ? other : nullptr;
}
case operation::get:
return element;
}
return nullptr;
}
template<typename Type, typename... Args>
void initialize([[maybe_unused]] Args &&...args) {
info = &type_id<std::remove_cv_t<std::remove_reference_t<Type>>>();
if constexpr(!std::is_void_v<Type>) {
vtable = basic_vtable<std::remove_cv_t<std::remove_reference_t<Type>>>;
if constexpr(std::is_lvalue_reference_v<Type>) {
static_assert(sizeof...(Args) == 1u && (std::is_lvalue_reference_v<Args> && ...), "Invalid arguments");
mode = std::is_const_v<std::remove_reference_t<Type>> ? policy::cref : policy::ref;
instance = (std::addressof(args), ...);
} else if constexpr(in_situ<std::remove_cv_t<std::remove_reference_t<Type>>>) {
if constexpr(sizeof...(Args) != 0u && std::is_aggregate_v<std::remove_cv_t<std::remove_reference_t<Type>>>) {
new(&storage) std::remove_cv_t<std::remove_reference_t<Type>>{std::forward<Args>(args)...};
} else {
new(&storage) std::remove_cv_t<std::remove_reference_t<Type>>(std::forward<Args>(args)...);
}
} else {
if constexpr(sizeof...(Args) != 0u && std::is_aggregate_v<std::remove_cv_t<std::remove_reference_t<Type>>>) {
instance = new std::remove_cv_t<std::remove_reference_t<Type>>{std::forward<Args>(args)...};
} else {
instance = new std::remove_cv_t<std::remove_reference_t<Type>>(std::forward<Args>(args)...);
}
}
}
}
basic_any(const basic_any &other, const policy pol) noexcept
: instance{other.data()},
info{other.info},
vtable{other.vtable},
mode{pol} {}
public:
/*! @brief Size of the internal storage. */
static constexpr auto length = Len;
/*! @brief Alignment requirement. */
static constexpr auto alignment = Align;
/*! @brief Default constructor. */
constexpr basic_any() noexcept
: basic_any{std::in_place_type<void>} {}
/**
* @brief Constructs a wrapper by directly initializing the new object.
* @tparam Type Type of object to use to initialize the wrapper.
* @tparam Args Types of arguments to use to construct the new instance.
* @param args Parameters to use to construct the instance.
*/
template<typename Type, typename... Args>
explicit basic_any(std::in_place_type_t<Type>, Args &&...args)
: instance{},
info{},
vtable{},
mode{policy::owner} {
initialize<Type>(std::forward<Args>(args)...);
}
/**
* @brief Constructs a wrapper from a given value.
* @tparam Type Type of object to use to initialize the wrapper.
* @param value An instance of an object to use to initialize the wrapper.
*/
template<typename Type, typename = std::enable_if_t<!std::is_same_v<std::decay_t<Type>, basic_any>>>
basic_any(Type &&value)
: basic_any{std::in_place_type<std::decay_t<Type>>, std::forward<Type>(value)} {}
/**
* @brief Copy constructor.
* @param other The instance to copy from.
*/
basic_any(const basic_any &other)
: basic_any{} {
if(other.vtable) {
other.vtable(operation::copy, other, this);
}
}
/**
* @brief Move constructor.
* @param other The instance to move from.
*/
basic_any(basic_any &&other) noexcept
: instance{},
info{other.info},
vtable{other.vtable},
mode{other.mode} {
if(other.vtable) {
other.vtable(operation::move, other, this);
}
}
/*! @brief Frees the internal storage, whatever it means. */
~basic_any() {
if(vtable && owner()) {
vtable(operation::destroy, *this, nullptr);
}
}
/**
* @brief Copy assignment operator.
* @param other The instance to copy from.
* @return This any object.
*/
basic_any &operator=(const basic_any &other) {
reset();
if(other.vtable) {
other.vtable(operation::copy, other, this);
}
return *this;
}
/**
* @brief Move assignment operator.
* @param other The instance to move from.
* @return This any object.
*/
basic_any &operator=(basic_any &&other) noexcept {
reset();
if(other.vtable) {
other.vtable(operation::move, other, this);
info = other.info;
vtable = other.vtable;
mode = other.mode;
}
return *this;
}
/**
* @brief Value assignment operator.
* @tparam Type Type of object to use to initialize the wrapper.
* @param value An instance of an object to use to initialize the wrapper.
* @return This any object.
*/
template<typename Type>
std::enable_if_t<!std::is_same_v<std::decay_t<Type>, basic_any>, basic_any &>
operator=(Type &&value) {
emplace<std::decay_t<Type>>(std::forward<Type>(value));
return *this;
}
/**
* @brief Returns the object type if any, `type_id<void>()` otherwise.
* @return The object type if any, `type_id<void>()` otherwise.
*/
[[nodiscard]] const type_info &type() const noexcept {
return *info;
}
/**
* @brief Returns an opaque pointer to the contained instance.
* @return An opaque pointer to the contained instance, if any.
*/
[[nodiscard]] const void *data() const noexcept {
return vtable ? vtable(operation::get, *this, nullptr) : nullptr;
}
/**
* @brief Returns an opaque pointer to the contained instance.
* @param req Expected type.
* @return An opaque pointer to the contained instance, if any.
*/
[[nodiscard]] const void *data(const type_info &req) const noexcept {
return *info == req ? data() : nullptr;
}
/**
* @brief Returns an opaque pointer to the contained instance.
* @return An opaque pointer to the contained instance, if any.
*/
[[nodiscard]] void *data() noexcept {
return mode == policy::cref ? nullptr : const_cast<void *>(std::as_const(*this).data());
}
/**
* @brief Returns an opaque pointer to the contained instance.
* @param req Expected type.
* @return An opaque pointer to the contained instance, if any.
*/
[[nodiscard]] void *data(const type_info &req) noexcept {
return mode == policy::cref ? nullptr : const_cast<void *>(std::as_const(*this).data(req));
}
/**
* @brief Replaces the contained object by creating a new instance directly.
* @tparam Type Type of object to use to initialize the wrapper.
* @tparam Args Types of arguments to use to construct the new instance.
* @param args Parameters to use to construct the instance.
*/
template<typename Type, typename... Args>
void emplace(Args &&...args) {
reset();
initialize<Type>(std::forward<Args>(args)...);
}
/**
* @brief Assigns a value to the contained object without replacing it.
* @param other The value to assign to the contained object.
* @return True in case of success, false otherwise.
*/
bool assign(const basic_any &other) {
if(vtable && mode != policy::cref && *info == *other.info) {
return (vtable(operation::assign, *this, other.data()) != nullptr);
}
return false;
}
/*! @copydoc assign */
bool assign(basic_any &&other) {
if(vtable && mode != policy::cref && *info == *other.info) {
if(auto *val = other.data(); val) {
return (vtable(operation::transfer, *this, val) != nullptr);
} else {
return (vtable(operation::assign, *this, std::as_const(other).data()) != nullptr);
}
}
return false;
}
/*! @brief Destroys the contained object. */
void reset() {
if(vtable && owner()) {
vtable(operation::destroy, *this, nullptr);
}
// unnecessary but it helps to detect nasty bugs
ENTT_ASSERT((instance = nullptr) == nullptr, "");
info = &type_id<void>();
vtable = nullptr;
mode = policy::owner;
}
/**
* @brief Returns false if a wrapper is empty, true otherwise.
* @return False if the wrapper is empty, true otherwise.
*/
[[nodiscard]] explicit operator bool() const noexcept {
return vtable != nullptr;
}
/**
* @brief Checks if two wrappers differ in their content.
* @param other Wrapper with which to compare.
* @return False if the two objects differ in their content, true otherwise.
*/
[[nodiscard]] bool operator==(const basic_any &other) const noexcept {
if(vtable && *info == *other.info) {
return (vtable(operation::compare, *this, other.data()) != nullptr);
}
return (!vtable && !other.vtable);
}
/**
* @brief Checks if two wrappers differ in their content.
* @param other Wrapper with which to compare.
* @return True if the two objects differ in their content, false otherwise.
*/
[[nodiscard]] bool operator!=(const basic_any &other) const noexcept {
return !(*this == other);
}
/**
* @brief Aliasing constructor.
* @return A wrapper that shares a reference to an unmanaged object.
*/
[[nodiscard]] basic_any as_ref() noexcept {
return basic_any{*this, (mode == policy::cref ? policy::cref : policy::ref)};
}
/*! @copydoc as_ref */
[[nodiscard]] basic_any as_ref() const noexcept {
return basic_any{*this, policy::cref};
}
/**
* @brief Returns true if a wrapper owns its object, false otherwise.
* @return True if the wrapper owns its object, false otherwise.
*/
[[nodiscard]] bool owner() const noexcept {
return (mode == policy::owner);
}
private:
union {
const void *instance;
storage_type storage;
};
const type_info *info;
vtable_type *vtable;
policy mode;
};
/**
* @brief Performs type-safe access to the contained object.
* @tparam Type Type to which conversion is required.
* @tparam Len Size of the storage reserved for the small buffer optimization.
* @tparam Align Alignment requirement.
* @param data Target any object.
* @return The element converted to the requested type.
*/
template<typename Type, std::size_t Len, std::size_t Align>
Type any_cast(const basic_any<Len, Align> &data) noexcept {
const auto *const instance = any_cast<std::remove_reference_t<Type>>(&data);
ENTT_ASSERT(instance, "Invalid instance");
return static_cast<Type>(*instance);
}
/*! @copydoc any_cast */
template<typename Type, std::size_t Len, std::size_t Align>
Type any_cast(basic_any<Len, Align> &data) noexcept {
// forces const on non-reference types to make them work also with wrappers for const references
auto *const instance = any_cast<std::remove_reference_t<const Type>>(&data);
ENTT_ASSERT(instance, "Invalid instance");
return static_cast<Type>(*instance);
}
/*! @copydoc any_cast */
template<typename Type, std::size_t Len, std::size_t Align>
Type any_cast(basic_any<Len, Align> &&data) noexcept {
if constexpr(std::is_copy_constructible_v<std::remove_cv_t<std::remove_reference_t<Type>>>) {
if(auto *const instance = any_cast<std::remove_reference_t<Type>>(&data); instance) {
return static_cast<Type>(std::move(*instance));
} else {
return any_cast<Type>(data);
}
} else {
auto *const instance = any_cast<std::remove_reference_t<Type>>(&data);
ENTT_ASSERT(instance, "Invalid instance");
return static_cast<Type>(std::move(*instance));
}
}
/*! @copydoc any_cast */
template<typename Type, std::size_t Len, std::size_t Align>
const Type *any_cast(const basic_any<Len, Align> *data) noexcept {
const auto &info = type_id<std::remove_cv_t<Type>>();
return static_cast<const Type *>(data->data(info));
}
/*! @copydoc any_cast */
template<typename Type, std::size_t Len, std::size_t Align>
Type *any_cast(basic_any<Len, Align> *data) noexcept {
if constexpr(std::is_const_v<Type>) {
// last attempt to make wrappers for const references return their values
return any_cast<Type>(&std::as_const(*data));
} else {
const auto &info = type_id<std::remove_cv_t<Type>>();
return static_cast<Type *>(data->data(info));
}
}
/**
* @brief Constructs a wrapper from a given type, passing it all arguments.
* @tparam Type Type of object to use to initialize the wrapper.
* @tparam Len Size of the storage reserved for the small buffer optimization.
* @tparam Align Optional alignment requirement.
* @tparam Args Types of arguments to use to construct the new instance.
* @param args Parameters to use to construct the instance.
* @return A properly initialized wrapper for an object of the given type.
*/
template<typename Type, std::size_t Len = basic_any<>::length, std::size_t Align = basic_any<Len>::alignment, typename... Args>
basic_any<Len, Align> make_any(Args &&...args) {
return basic_any<Len, Align>{std::in_place_type<Type>, std::forward<Args>(args)...};
}
/**
* @brief Forwards its argument and avoids copies for lvalue references.
* @tparam Len Size of the storage reserved for the small buffer optimization.
* @tparam Align Optional alignment requirement.
* @tparam Type Type of argument to use to construct the new instance.
* @param value Parameter to use to construct the instance.
* @return A properly initialized and not necessarily owning wrapper.
*/
template<std::size_t Len = basic_any<>::length, std::size_t Align = basic_any<Len>::alignment, typename Type>
basic_any<Len, Align> forward_as_any(Type &&value) {
return basic_any<Len, Align>{std::in_place_type<Type &&>, std::forward<Type>(value)};
}
} // namespace entt
#endif
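
For orientation while reading the vendored header above, here is a minimal usage sketch of the basic_any / any_cast / make_any / forward_as_any API it documents. This is an editor's illustration, not part of the diff; the entt::any alias for basic_any<> and the include path are assumed from the usual EnTT layout.

#include <entt/entt.hpp> // assumed include; the vendored path may differ
#include <cassert>
#include <string>

int main() {
    entt::any value{42};                              // owning wrapper, small-buffer optimized
    assert(value.type() == entt::type_id<int>());
    assert(entt::any_cast<int>(value) == 42);

    entt::any_cast<int &>(value) = 3;                 // mutate through a reference cast
    assert(entt::any_cast<int>(value) == 3);

    int local = 7;
    auto ref = entt::forward_as_any(local);           // non-owning wrapper around an lvalue
    assert(!ref.owner());
    entt::any_cast<int &>(ref) = 9;                   // writes through to local
    assert(local == 9);

    auto str = entt::make_any<std::string>("hello");  // in-place construction
    assert(entt::any_cast<const std::string &>(str) == "hello");
}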

@@ -0,0 +1,30 @@
#ifndef ENTT_CORE_ATTRIBUTE_H
#define ENTT_CORE_ATTRIBUTE_H
#ifndef ENTT_EXPORT
# if defined _WIN32 || defined __CYGWIN__ || defined _MSC_VER
# define ENTT_EXPORT __declspec(dllexport)
# define ENTT_IMPORT __declspec(dllimport)
# define ENTT_HIDDEN
# elif defined __GNUC__ && __GNUC__ >= 4
# define ENTT_EXPORT __attribute__((visibility("default")))
# define ENTT_IMPORT __attribute__((visibility("default")))
# define ENTT_HIDDEN __attribute__((visibility("hidden")))
# else /* Unsupported compiler */
# define ENTT_EXPORT
# define ENTT_IMPORT
# define ENTT_HIDDEN
# endif
#endif
#ifndef ENTT_API
# if defined ENTT_API_EXPORT
# define ENTT_API ENTT_EXPORT
# elif defined ENTT_API_IMPORT
# define ENTT_API ENTT_IMPORT
# else /* No API */
# define ENTT_API
# endif
#endif
#endif
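
A short sketch of how the export/import switch above is typically driven (editor's illustration, not part of the diff; the type name and include path are hypothetical). ENTT_API marks declarations that must cross a shared-library boundary: the library build defines ENTT_API_EXPORT, its consumers define ENTT_API_IMPORT, and header-only builds define neither.

// library build:  -DENTT_API_EXPORT
// consumer build: -DENTT_API_IMPORT
#include <entt/core/attribute.h> // assumed include path

class ENTT_API shared_context { // hypothetical type exported across the ABI boundary
public:
    int value() const { return 42; }
};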

@@ -0,0 +1,279 @@
#ifndef ENTT_CORE_COMPRESSED_PAIR_HPP
#define ENTT_CORE_COMPRESSED_PAIR_HPP
#include <cstddef>
#include <tuple>
#include <type_traits>
#include <utility>
#include "type_traits.hpp"
namespace entt {
/**
* @cond TURN_OFF_DOXYGEN
* Internal details not to be documented.
*/
namespace internal {
template<typename Type, std::size_t, typename = void>
struct compressed_pair_element {
using reference = Type &;
using const_reference = const Type &;
template<bool Dummy = true, typename = std::enable_if_t<Dummy && std::is_default_constructible_v<Type>>>
constexpr compressed_pair_element() noexcept(std::is_nothrow_default_constructible_v<Type>)
: value{} {}
template<typename Arg, typename = std::enable_if_t<!std::is_same_v<std::remove_cv_t<std::remove_reference_t<Arg>>, compressed_pair_element>>>
constexpr compressed_pair_element(Arg &&arg) noexcept(std::is_nothrow_constructible_v<Type, Arg>)
: value{std::forward<Arg>(arg)} {}
template<typename... Args, std::size_t... Index>
constexpr compressed_pair_element(std::tuple<Args...> args, std::index_sequence<Index...>) noexcept(std::is_nothrow_constructible_v<Type, Args...>)
: value{std::forward<Args>(std::get<Index>(args))...} {}
[[nodiscard]] constexpr reference get() noexcept {
return value;
}
[[nodiscard]] constexpr const_reference get() const noexcept {
return value;
}
private:
Type value;
};
template<typename Type, std::size_t Tag>
struct compressed_pair_element<Type, Tag, std::enable_if_t<is_ebco_eligible_v<Type>>>: Type {
using reference = Type &;
using const_reference = const Type &;
using base_type = Type;
template<bool Dummy = true, typename = std::enable_if_t<Dummy && std::is_default_constructible_v<base_type>>>
constexpr compressed_pair_element() noexcept(std::is_nothrow_default_constructible_v<base_type>)
: base_type{} {}
template<typename Arg, typename = std::enable_if_t<!std::is_same_v<std::remove_cv_t<std::remove_reference_t<Arg>>, compressed_pair_element>>>
constexpr compressed_pair_element(Arg &&arg) noexcept(std::is_nothrow_constructible_v<base_type, Arg>)
: base_type{std::forward<Arg>(arg)} {}
template<typename... Args, std::size_t... Index>
constexpr compressed_pair_element(std::tuple<Args...> args, std::index_sequence<Index...>) noexcept(std::is_nothrow_constructible_v<base_type, Args...>)
: base_type{std::forward<Args>(std::get<Index>(args))...} {}
[[nodiscard]] constexpr reference get() noexcept {
return *this;
}
[[nodiscard]] constexpr const_reference get() const noexcept {
return *this;
}
};
} // namespace internal
/**
* Internal details not to be documented.
* @endcond
*/
/**
* @brief A compressed pair.
*
* A pair that exploits the _Empty Base Class Optimization_ (or _EBCO_) to
* reduce its final size to a minimum.
*
* @tparam First The type of the first element that the pair stores.
* @tparam Second The type of the second element that the pair stores.
*/
template<typename First, typename Second>
class compressed_pair final
: internal::compressed_pair_element<First, 0u>,
internal::compressed_pair_element<Second, 1u> {
using first_base = internal::compressed_pair_element<First, 0u>;
using second_base = internal::compressed_pair_element<Second, 1u>;
public:
/*! @brief The type of the first element that the pair stores. */
using first_type = First;
/*! @brief The type of the second element that the pair stores. */
using second_type = Second;
/**
* @brief Default constructor, conditionally enabled.
*
* This constructor is only available when the types that the pair stores
* are both at least default constructible.
*
* @tparam Dummy Dummy template parameter used for internal purposes.
*/
template<bool Dummy = true, typename = std::enable_if_t<Dummy && std::is_default_constructible_v<first_type> && std::is_default_constructible_v<second_type>>>
constexpr compressed_pair() noexcept(std::is_nothrow_default_constructible_v<first_base> &&std::is_nothrow_default_constructible_v<second_base>)
: first_base{},
second_base{} {}
/**
* @brief Copy constructor.
* @param other The instance to copy from.
*/
constexpr compressed_pair(const compressed_pair &other) noexcept(std::is_nothrow_copy_constructible_v<first_base> &&std::is_nothrow_copy_constructible_v<second_base>) = default;
/**
* @brief Move constructor.
* @param other The instance to move from.
*/
constexpr compressed_pair(compressed_pair &&other) noexcept(std::is_nothrow_move_constructible_v<first_base> &&std::is_nothrow_move_constructible_v<second_base>) = default;
/**
* @brief Constructs a pair from its values.
* @tparam Arg Type of value to use to initialize the first element.
* @tparam Other Type of value to use to initialize the second element.
* @param arg Value to use to initialize the first element.
* @param other Value to use to initialize the second element.
*/
template<typename Arg, typename Other>
constexpr compressed_pair(Arg &&arg, Other &&other) noexcept(std::is_nothrow_constructible_v<first_base, Arg> &&std::is_nothrow_constructible_v<second_base, Other>)
: first_base{std::forward<Arg>(arg)},
second_base{std::forward<Other>(other)} {}
/**
* @brief Constructs a pair by forwarding the arguments to its parts.
* @tparam Args Types of arguments to use to initialize the first element.
* @tparam Other Types of arguments to use to initialize the second element.
* @param args Arguments to use to initialize the first element.
* @param other Arguments to use to initialize the second element.
*/
template<typename... Args, typename... Other>
constexpr compressed_pair(std::piecewise_construct_t, std::tuple<Args...> args, std::tuple<Other...> other) noexcept(std::is_nothrow_constructible_v<first_base, Args...> &&std::is_nothrow_constructible_v<second_base, Other...>)
: first_base{std::move(args), std::index_sequence_for<Args...>{}},
second_base{std::move(other), std::index_sequence_for<Other...>{}} {}
/**
* @brief Copy assignment operator.
* @param other The instance to copy from.
* @return This compressed pair object.
*/
constexpr compressed_pair &operator=(const compressed_pair &other) noexcept(std::is_nothrow_copy_assignable_v<first_base> &&std::is_nothrow_copy_assignable_v<second_base>) = default;
/**
* @brief Move assignment operator.
* @param other The instance to move from.
* @return This compressed pair object.
*/
constexpr compressed_pair &operator=(compressed_pair &&other) noexcept(std::is_nothrow_move_assignable_v<first_base> &&std::is_nothrow_move_assignable_v<second_base>) = default;
/**
* @brief Returns the first element that a pair stores.
* @return The first element that a pair stores.
*/
[[nodiscard]] constexpr first_type &first() noexcept {
return static_cast<first_base &>(*this).get();
}
/*! @copydoc first */
[[nodiscard]] constexpr const first_type &first() const noexcept {
return static_cast<const first_base &>(*this).get();
}
/**
* @brief Returns the second element that a pair stores.
* @return The second element that a pair stores.
*/
[[nodiscard]] constexpr second_type &second() noexcept {
return static_cast<second_base &>(*this).get();
}
/*! @copydoc second */
[[nodiscard]] constexpr const second_type &second() const noexcept {
return static_cast<const second_base &>(*this).get();
}
/**
* @brief Swaps two compressed pair objects.
* @param other The compressed pair to swap with.
*/
constexpr void swap(compressed_pair &other) noexcept(std::is_nothrow_swappable_v<first_type> &&std::is_nothrow_swappable_v<second_type>) {
using std::swap;
swap(first(), other.first());
swap(second(), other.second());
}
/**
* @brief Extracts an element from the compressed pair.
* @tparam Index An integer value that is either 0 or 1.
* @return Returns a reference to the first element if `Index` is 0 and a
* reference to the second element if `Index` is 1.
*/
template<std::size_t Index>
constexpr decltype(auto) get() noexcept {
if constexpr(Index == 0u) {
return first();
} else {
static_assert(Index == 1u, "Index out of bounds");
return second();
}
}
/*! @copydoc get */
template<std::size_t Index>
constexpr decltype(auto) get() const noexcept {
if constexpr(Index == 0u) {
return first();
} else {
static_assert(Index == 1u, "Index out of bounds");
return second();
}
}
};
/**
* @brief Deduction guide.
* @tparam Type Type of value to use to initialize the first element.
* @tparam Other Type of value to use to initialize the second element.
*/
template<typename Type, typename Other>
compressed_pair(Type &&, Other &&) -> compressed_pair<std::decay_t<Type>, std::decay_t<Other>>;
/**
* @brief Swaps two compressed pair objects.
* @tparam First The type of the first element that the pairs store.
* @tparam Second The type of the second element that the pairs store.
* @param lhs A valid compressed pair object.
* @param rhs A valid compressed pair object.
*/
template<typename First, typename Second>
inline constexpr void swap(compressed_pair<First, Second> &lhs, compressed_pair<First, Second> &rhs) {
lhs.swap(rhs);
}
} // namespace entt
// disable structured binding support for clang 6, it messes when specializing tuple_size
#if !defined __clang_major__ || __clang_major__ > 6
namespace std {
/**
* @brief `std::tuple_size` specialization for `compressed_pair`s.
* @tparam First The type of the first element that the pair stores.
* @tparam Second The type of the second element that the pair stores.
*/
template<typename First, typename Second>
struct tuple_size<entt::compressed_pair<First, Second>>: integral_constant<size_t, 2u> {};
/**
* @brief `std::tuple_element` specialization for `compressed_pair`s.
* @tparam Index The index of the type to return.
* @tparam First The type of the first element that the pair stores.
* @tparam Second The type of the second element that the pair stores.
*/
template<size_t Index, typename First, typename Second>
struct tuple_element<Index, entt::compressed_pair<First, Second>>: conditional<Index == 0u, First, Second> {
static_assert(Index < 2u, "Index out of bounds");
};
} // namespace std
#endif
#endif
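
A minimal usage sketch of compressed_pair as documented above (editor's illustration, not part of the diff; the include path and the empty helper type are assumptions).

#include <entt/entt.hpp> // assumed include; the vendored path may differ

struct empty_deleter {}; // hypothetical empty type, eligible for EBCO

int main() {
    entt::compressed_pair pair{42, empty_deleter{}}; // deduction guide picks <int, empty_deleter>
    pair.first() = 3;
    // On mainstream ABIs the empty element adds no storage: sizeof(pair) == sizeof(int).

    // Structured bindings go through the std::tuple_size/tuple_element specializations above.
    auto &[count, deleter] = pair;
    (void)deleter; // the empty element still has to be named in the binding
    count = 7;
    return pair.first(); // 7
}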

@@ -0,0 +1,97 @@
#ifndef ENTT_CORE_ENUM_HPP
#define ENTT_CORE_ENUM_HPP
#include <type_traits>
namespace entt {
/**
* @brief Enable bitmask support for enum classes.
* @tparam Type The enum type for which to enable bitmask support.
*/
template<typename Type, typename = void>
struct enum_as_bitmask: std::false_type {};
/*! @copydoc enum_as_bitmask */
template<typename Type>
struct enum_as_bitmask<Type, std::void_t<decltype(Type::_entt_enum_as_bitmask)>>: std::is_enum<Type> {};
/**
* @brief Helper variable template.
* @tparam Type The enum class type for which to enable bitmask support.
*/
template<typename Type>
inline constexpr bool enum_as_bitmask_v = enum_as_bitmask<Type>::value;
} // namespace entt
/**
* @brief Operator available for enums for which bitmask support is enabled.
* @tparam Type Enum class type.
* @param lhs The first value to use.
* @param rhs The second value to use.
* @return The result of invoking the operator on the underlying types of the
* two values provided.
*/
template<typename Type>
[[nodiscard]] constexpr std::enable_if_t<entt::enum_as_bitmask_v<Type>, Type>
operator|(const Type lhs, const Type rhs) noexcept {
return static_cast<Type>(static_cast<std::underlying_type_t<Type>>(lhs) | static_cast<std::underlying_type_t<Type>>(rhs));
}
/*! @copydoc operator| */
template<typename Type>
[[nodiscard]] constexpr std::enable_if_t<entt::enum_as_bitmask_v<Type>, Type>
operator&(const Type lhs, const Type rhs) noexcept {
return static_cast<Type>(static_cast<std::underlying_type_t<Type>>(lhs) & static_cast<std::underlying_type_t<Type>>(rhs));
}
/*! @copydoc operator| */
template<typename Type>
[[nodiscard]] constexpr std::enable_if_t<entt::enum_as_bitmask_v<Type>, Type>
operator^(const Type lhs, const Type rhs) noexcept {
return static_cast<Type>(static_cast<std::underlying_type_t<Type>>(lhs) ^ static_cast<std::underlying_type_t<Type>>(rhs));
}
/**
* @brief Operator available for enums for which bitmask support is enabled.
* @tparam Type Enum class type.
* @param value The value to use.
* @return The result of invoking the operator on the underlying types of the
* value provided.
*/
template<typename Type>
[[nodiscard]] constexpr std::enable_if_t<entt::enum_as_bitmask_v<Type>, Type>
operator~(const Type value) noexcept {
return static_cast<Type>(~static_cast<std::underlying_type_t<Type>>(value));
}
/*! @copydoc operator~ */
template<typename Type>
[[nodiscard]] constexpr std::enable_if_t<entt::enum_as_bitmask_v<Type>, bool>
operator!(const Type value) noexcept {
return !static_cast<std::underlying_type_t<Type>>(value);
}
/*! @copydoc operator| */
template<typename Type>
constexpr std::enable_if_t<entt::enum_as_bitmask_v<Type>, Type &>
operator|=(Type &lhs, const Type rhs) noexcept {
return (lhs = (lhs | rhs));
}
/*! @copydoc operator| */
template<typename Type>
constexpr std::enable_if_t<entt::enum_as_bitmask_v<Type>, Type &>
operator&=(Type &lhs, const Type rhs) noexcept {
return (lhs = (lhs & rhs));
}
/*! @copydoc operator| */
template<typename Type>
constexpr std::enable_if_t<entt::enum_as_bitmask_v<Type>, Type &>
operator^=(Type &lhs, const Type rhs) noexcept {
return (lhs = (lhs ^ rhs));
}
#endif
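
A minimal sketch of the bitmask support above (editor's illustration, not part of the diff; the permission enum and include path are hypothetical). To opt a scoped enum in, it is enough to add an enumerator named _entt_enum_as_bitmask, which is what the enum_as_bitmask detection above looks for.

#include <entt/entt.hpp> // assumed include; the vendored path may differ
#include <cstdint>

enum class permission : std::uint8_t {
    none = 0x00,
    read = 0x01,
    write = 0x02,
    _entt_enum_as_bitmask // sentinel enumerator that enables the operators above
};

int main() {
    permission mode = permission::read | permission::write;
    mode &= ~permission::write;
    return (mode & permission::read) == permission::read ? 0 : 1; // 0: read bit kept
}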

Some files were not shown because too many files have changed in this diff.