forked from mindsdb/mindsdb
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: test_cache.py
More file actions
89 lines (65 loc) · 2.19 KB
/
test_cache.py
File metadata and controls
89 lines (65 loc) · 2.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
import datetime as dt
import time
import unittest
import traceback
import redis
import warnings
import tempfile
import json
import os
import pandas as pd
from mindsdb.utilities.cache import RedisCache, FileCache, dataframe_checksum
class TestCashe(unittest.TestCase):
    """Exercise RedisCache and FileCache: set/get round-trips, the
    DataFrame-specific helpers, deletion, and max_size eviction.

    NOTE(review): the class name keeps the original spelling ("Cashe") so
    external test selectors still match; consider renaming to TestCache.
    """

    @classmethod
    def setUpClass(cls):
        """Point MINDSDB_CONFIG_PATH at a throwaway empty config file.

        Uses the unittest spelling ``setUpClass``, which both unittest and
        pytest invoke; the original ``setup_class`` name is pytest-only and
        never runs under plain unittest, leaving the env var unset.
        """
        # config
        config = {}
        # TODO run on own database
        fdi, cfg_file = tempfile.mkstemp(prefix='mindsdb_conf_')
        with os.fdopen(fdi, 'w') as fd:
            json.dump(config, fd)
        os.environ['MINDSDB_CONFIG_PATH'] = cfg_file
        # Remember the path so tearDownClass can remove it (the original
        # version leaked the temp file).
        cls._cfg_file = cfg_file

    @classmethod
    def tearDownClass(cls):
        """Delete the temporary config file created in setUpClass."""
        try:
            os.remove(cls._cfg_file)
        except OSError:
            # Best-effort cleanup; a missing file is not a test failure.
            pass

    def test_redis(self):
        """Run the shared scenario against RedisCache; warn and skip the
        body when no redis server is reachable."""
        cache = RedisCache('predict', max_size=2)
        try:
            self.cache_test(cache)
        except redis.ConnectionError as e:
            # Skip test for redis if no redis server is available.
            warnings.warn(f'redis is not available: {e}')
            print(traceback.format_exc())

    def test_file(self):
        """Run the shared scenario against FileCache."""
        cache = FileCache('predict', max_size=2)
        self.cache_test(cache)

    def cache_test(self, cache):
        """Shared scenario driven by both backends.

        Round-trips a DataFrame through set/get and set_df/get_df, verifies
        deletion returns None, then overfills the cache past max_size and
        checks the oldest entry was evicted.
        """
        # Build a frame with mixed column types (int, float, str, datetime,
        # list, dict) to stress serialization.
        df = pd.DataFrame([
            [1, 1.2, 'string', dt.datetime.now(), [1, 2, 3], {1: 3}],
            [2, 3.2, 'other', dt.datetime(2011, 12, 30), [3], {11: 23, 2: 3}],
        ], columns=['a', 'b', 'c', 'd', 'e', 'f'])
        # Make it bigger so the payload is non-trivial.
        df = pd.concat([df] * 100).reset_index()
        name = dataframe_checksum(df)

        # test save / load round-trip
        cache.set(name, df)
        df2 = cache.get(name)
        assert dataframe_checksum(df) == dataframe_checksum(df2)
        assert list(df.columns) == list(df2.columns)

        # test the DataFrame-specific helpers under a distinct key
        name += '1'
        cache.set_df(name, df)
        df2 = cache.get_df(name)
        assert dataframe_checksum(df) == dataframe_checksum(df2)
        assert list(df.columns) == list(df2.columns)

        # test delete: a removed key must read back as None
        cache.delete(name)
        df2 = cache.get(name)
        assert df2 is None

        # test max_size: load cache with 2 (max_size) + buffer entries
        cache.set('first', df)
        for i in range(8):
            # small sleep so entries get distinct timestamps and eviction
            # order is deterministic
            time.sleep(0.01)
            cache.set(str(i), df)
        # 'first' is the oldest entry, so it must have been evicted
        df2 = cache.get('first')
        assert df2 is None