// Copyright 2022 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <cstdint>  // For uintptr_t.
#include <cstring>  // For memcpy.
#include <string>   // For std::string, std::to_string.

#include <xnnpack.h>
#include <xnnpack/cache.h>

#include <gtest/gtest.h>
13
cache_end(const xnn_code_cache * cache)14 static void* cache_end(const xnn_code_cache* cache) {
15 return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(cache->cache.code.start) + cache->cache.code.size);
16 }
17
write_code(xnn_code_cache * cache,const std::string & str)18 static void write_code(xnn_code_cache* cache, const std::string& str) {
19 ASSERT_GE(cache->cache.code.capacity - cache->cache.code.size, str.length());
20 std::memcpy(cache_end(cache), str.data(), str.length());
21 cache->cache.code.size += str.length();
22 };
23
// A freshly initialized code cache can be released without any use in
// between; both calls must report success.
TEST(CODE_CACHE, init_and_release)
{
  xnn_initialize(/*allocator=*/nullptr);
  xnn_code_cache cache;
  EXPECT_EQ(xnn_status_success, xnn_init_code_cache(&cache));
  EXPECT_EQ(xnn_status_success, xnn_release_code_cache(&cache));
}
31
32
// Releasing a null cache pointer must be a safe no-op that reports success.
TEST(CODE_CACHE, release_null)
{
  EXPECT_EQ(xnn_status_success, xnn_release_code_cache(nullptr));
}
37
// Exercises the lookup/insert path: identical code bytes must produce a cache
// hit returning the original offset, while distinct bytes must miss and be
// inserted as a new entry at a new offset.
TEST(CODE_CACHE, get_or_insert)
{
  xnn_initialize(/*allocator=*/nullptr);
  xnn_code_cache cache;
  EXPECT_EQ(xnn_status_success, xnn_init_code_cache(&cache));

  // First insertion is always a miss; its code starts at offset 0.
  write_code(&cache, "1234");
  ASSERT_EQ(0, xnn_get_or_insert_code_cache(&cache, cache.cache.code.start, 4));
  ASSERT_EQ(0, cache.cache.hits);
  ASSERT_EQ(1, cache.cache.misses);

  void* span2_code = cache_end(&cache);
  // Simulate a cache hit: same bytes as before, so lookup returns offset 0.
  write_code(&cache, "1234");
  ASSERT_EQ(0, xnn_get_or_insert_code_cache(&cache, span2_code, 4));
  ASSERT_EQ(1, cache.cache.hits);
  ASSERT_EQ(1, cache.cache.misses);

  void* span3_code = cache_end(&cache);
  // Simulate a cache miss: new bytes are inserted at offset 4 (right after
  // the first 4-byte entry; the duplicate hit above did not consume space).
  write_code(&cache, "5678");
  ASSERT_EQ(4, xnn_get_or_insert_code_cache(&cache, span3_code, 4));
  ASSERT_EQ(1, cache.cache.hits);
  ASSERT_EQ(2, cache.cache.misses);
  ASSERT_EQ(2, cache.cache.num_entries);

  EXPECT_EQ(xnn_status_success, xnn_release_code_cache(&cache));
}
66
// Inserts enough distinct entries to force the hash table to grow, then looks
// every entry up again to verify all of them survived the rehash.
TEST(CODE_CACHE, grow) {
  xnn_initialize(/*allocator=*/nullptr);
  xnn_code_cache cache;
  EXPECT_EQ(xnn_status_success, xnn_init_code_cache(&cache));
  const size_t old_num_buckets = cache.cache.num_buckets;
  for (size_t i = 0, expected_offset = 0; i < old_num_buckets; i++) {
    // Add many entries to force cache to grow.
    const std::string s = std::to_string(i);
    // write_code will update cache size, so get the code offset first.
    void* code_ptr = cache_end(&cache);
    write_code(&cache, s);
    ASSERT_EQ(expected_offset, xnn_get_or_insert_code_cache(&cache, code_ptr, s.length()));
    expected_offset += s.length();
  }

  ASSERT_EQ(0, cache.cache.hits);
  ASSERT_EQ(old_num_buckets, cache.cache.num_entries);
  // Check that cache has grown.
  ASSERT_LT(old_num_buckets, cache.cache.num_buckets);
  // Check that all the entries are still in cache: re-insert the same byte
  // sequences and expect each lookup to return the original offset.
  for (size_t i = 0, expected_offset = 0; i < old_num_buckets; i++) {
    const std::string s = std::to_string(i);
    // write_code will update cache size, so get the code offset first.
    void* code_ptr = cache_end(&cache);
    write_code(&cache, s);
    ASSERT_EQ(expected_offset, xnn_get_or_insert_code_cache(&cache, code_ptr, s.length()));
    expected_offset += s.length();
  }
  // And now all of the lookups should be cache hits.
  ASSERT_EQ(old_num_buckets, cache.cache.hits);

  EXPECT_EQ(xnn_status_success, xnn_release_code_cache(&cache));
}
100