/*
 * Copyright © 2015-2020 Inria.  All rights reserved.
 * See COPYING in top-level directory.
 */

#include "private/autogen/config.h"
#include "hwloc.h"
#include "private/private.h"

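/* Hardwired topology of a K computer node:
 * one SPARC64 VIIIfx package with 8 cores,
 * each core with private 32KB L1i and 32KB L1d caches,
 * and a single 6MB L2 cache shared by all cores.
 * Each core's cpuset is duplicated for its L1 caches;
 * the Core object takes ownership of the cpuset itself
 * (or it is freed when Core objects are filtered out).
 */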
int hwloc_look_hardwired_fujitsu_k(struct hwloc_topology *topology)
{
  /* If a broken core gets disabled, its bit disappears and the other core bits are NOT shifted towards 0.
   * Such a node is not given to user jobs, so there is no need to handle that case properly.
   */
  unsigned i;
  hwloc_obj_t obj;
  hwloc_bitmap_t set;

  for(i=0; i<8; i++) {
    set = hwloc_bitmap_alloc();
    hwloc_bitmap_set(set, i);

    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L1ICACHE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L1ICACHE, HWLOC_UNKNOWN_INDEX);
      obj->cpuset = hwloc_bitmap_dup(set);
      obj->attr->cache.type = HWLOC_OBJ_CACHE_INSTRUCTION;
      obj->attr->cache.depth = 1;
      obj->attr->cache.size = 32*1024;
      obj->attr->cache.linesize = 128;
      obj->attr->cache.associativity = 2;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:k:l1icache");
    }
    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L1CACHE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L1CACHE, HWLOC_UNKNOWN_INDEX);
      obj->cpuset = hwloc_bitmap_dup(set);
      obj->attr->cache.type = HWLOC_OBJ_CACHE_DATA;
      obj->attr->cache.depth = 1;
      obj->attr->cache.size = 32*1024;
      obj->attr->cache.linesize = 128;
      obj->attr->cache.associativity = 2;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:k:l1dcache");
    }
    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_CORE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_CORE, i);
      obj->cpuset = set;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:k:core");
    } else
      hwloc_bitmap_free(set);
  }

  set = hwloc_bitmap_alloc();
  hwloc_bitmap_set_range(set, 0, 7);

  if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L2CACHE)) {
    obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L2CACHE, HWLOC_UNKNOWN_INDEX);
    obj->cpuset = hwloc_bitmap_dup(set);
    obj->attr->cache.type = HWLOC_OBJ_CACHE_UNIFIED;
    obj->attr->cache.depth = 2;
    obj->attr->cache.size = 6*1024*1024;
    obj->attr->cache.linesize = 128;
    obj->attr->cache.associativity = 12;
    hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:k:l2cache");
  }
  if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_PACKAGE)) {
    obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_PACKAGE, 0);
    obj->cpuset = set;
    hwloc_obj_add_info(obj, "CPUVendor", "Fujitsu");
    hwloc_obj_add_info(obj, "CPUModel", "SPARC64 VIIIfx");
    hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:k:package");
  } else
    hwloc_bitmap_free(set);

  topology->support.discovery->pu = 1;
  hwloc_setup_pu_level(topology, 8);

  return 0;
}

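/* Hardwired topology of a Fujitsu FX10 node:
 * one SPARC64 IXfx package with 16 cores,
 * each core with private 32KB L1i and 32KB L1d caches,
 * and a single 12MB L2 cache shared by all cores.
 */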
int hwloc_look_hardwired_fujitsu_fx10(struct hwloc_topology *topology)
{
  /* If a broken core gets disabled, its bit disappears and the other core bits are NOT shifted towards 0.
   * Such a node is not given to user jobs, so there is no need to handle that case properly.
   */
  unsigned i;
  hwloc_obj_t obj;
  hwloc_bitmap_t set;

  for(i=0; i<16; i++) {
    set = hwloc_bitmap_alloc();
    hwloc_bitmap_set(set, i);

    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L1ICACHE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L1ICACHE, HWLOC_UNKNOWN_INDEX);
      obj->cpuset = hwloc_bitmap_dup(set);
      obj->attr->cache.type = HWLOC_OBJ_CACHE_INSTRUCTION;
      obj->attr->cache.depth = 1;
      obj->attr->cache.size = 32*1024;
      obj->attr->cache.linesize = 128;
      obj->attr->cache.associativity = 2;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx10:l1icache");
    }
    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L1CACHE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L1CACHE, HWLOC_UNKNOWN_INDEX);
      obj->cpuset = hwloc_bitmap_dup(set);
      obj->attr->cache.type = HWLOC_OBJ_CACHE_DATA;
      obj->attr->cache.depth = 1;
      obj->attr->cache.size = 32*1024;
      obj->attr->cache.linesize = 128;
      obj->attr->cache.associativity = 2;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx10:l1dcache");
    }
    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_CORE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_CORE, i);
      obj->cpuset = set;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx10:core");
    } else
      hwloc_bitmap_free(set);
  }

  set = hwloc_bitmap_alloc();
  hwloc_bitmap_set_range(set, 0, 15);

  if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L2CACHE)) {
    obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L2CACHE, HWLOC_UNKNOWN_INDEX);
    obj->cpuset = hwloc_bitmap_dup(set);
    obj->attr->cache.type = HWLOC_OBJ_CACHE_UNIFIED;
    obj->attr->cache.depth = 2;
    obj->attr->cache.size = 12*1024*1024;
    obj->attr->cache.linesize = 128;
    obj->attr->cache.associativity = 24;
    hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx10:l2cache");
  }
  if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_PACKAGE)) {
    obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_PACKAGE, 0);
    obj->cpuset = set;
    hwloc_obj_add_info(obj, "CPUVendor", "Fujitsu");
    hwloc_obj_add_info(obj, "CPUModel", "SPARC64 IXfx");
    hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx10:package");
  } else
    hwloc_bitmap_free(set);

  topology->support.discovery->pu = 1;
  hwloc_setup_pu_level(topology, 16);

  return 0;
}

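/* Hardwired topology of a Fujitsu FX100 node:
 * one SPARC64 XIfx package with 34 PUs,
 * i.e. 32 compute cores plus 2 assistant cores (PUs 32 and 33),
 * each core with private 64KB L1i and 64KB L1d caches,
 * and two 12MB L2 caches, each shared by 16 compute cores
 * and one assistant core.
 */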
int hwloc_look_hardwired_fujitsu_fx100(struct hwloc_topology *topology)
{
  /* If a broken core gets disabled, its bit disappears and the other core bits are NOT shifted towards 0.
   * Such a node is not given to user jobs, so there is no need to handle that case properly.
   */
  unsigned i;
  hwloc_obj_t obj;
  hwloc_bitmap_t set;

  for(i=0; i<34; i++) {
    set = hwloc_bitmap_alloc();
    hwloc_bitmap_set(set, i);

    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L1ICACHE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L1ICACHE, HWLOC_UNKNOWN_INDEX);
      obj->cpuset = hwloc_bitmap_dup(set);
      obj->attr->cache.type = HWLOC_OBJ_CACHE_INSTRUCTION;
      obj->attr->cache.depth = 1;
      obj->attr->cache.size = 64*1024;
      obj->attr->cache.linesize = 256;
      obj->attr->cache.associativity = 4;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx100:l1icache");
    }
    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L1CACHE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L1CACHE, HWLOC_UNKNOWN_INDEX);
      obj->cpuset = hwloc_bitmap_dup(set);
      obj->attr->cache.type = HWLOC_OBJ_CACHE_DATA;
      obj->attr->cache.depth = 1;
      obj->attr->cache.size = 64*1024;
      obj->attr->cache.linesize = 256;
      obj->attr->cache.associativity = 4;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx100:l1dcache");
    }
    if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_CORE)) {
      obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_CORE, i);
      obj->cpuset = set;
      hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx100:core");
    } else
      hwloc_bitmap_free(set);
  }

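  /* Two 12MB L2 caches: the first covers compute cores 0-15 plus assistant core 32,
   * the second covers compute cores 16-31 plus assistant core 33.
   */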
  if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_L2CACHE)) {
    obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L2CACHE, HWLOC_UNKNOWN_INDEX);
    obj->cpuset = hwloc_bitmap_alloc();
    hwloc_bitmap_set_range(obj->cpuset, 0, 15);
    hwloc_bitmap_set(obj->cpuset, 32);
    obj->attr->cache.type = HWLOC_OBJ_CACHE_UNIFIED;
    obj->attr->cache.depth = 2;
    obj->attr->cache.size = 12*1024*1024;
    obj->attr->cache.linesize = 256;
    obj->attr->cache.associativity = 24;
    hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx100:l2cache#0");

    obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_L2CACHE, HWLOC_UNKNOWN_INDEX);
    obj->cpuset = hwloc_bitmap_alloc();
    hwloc_bitmap_set_range(obj->cpuset, 16, 31);
    hwloc_bitmap_set(obj->cpuset, 33);
    obj->attr->cache.type = HWLOC_OBJ_CACHE_UNIFIED;
    obj->attr->cache.depth = 2;
    obj->attr->cache.size = 12*1024*1024;
    obj->attr->cache.linesize = 256;
    obj->attr->cache.associativity = 24;
    hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx100:l2cache#1");
  }
  if (hwloc_filter_check_keep_object_type(topology, HWLOC_OBJ_PACKAGE)) {
    obj = hwloc_alloc_setup_object(topology, HWLOC_OBJ_PACKAGE, 0);
    obj->cpuset = hwloc_bitmap_alloc();
    hwloc_bitmap_set_range(obj->cpuset, 0, 33);
    hwloc_obj_add_info(obj, "CPUVendor", "Fujitsu");
    hwloc_obj_add_info(obj, "CPUModel", "SPARC64 XIfx");
    hwloc__insert_object_by_cpuset(topology, NULL, obj, "hardwired:fx100:package");
  }

  topology->support.discovery->pu = 1;
  hwloc_setup_pu_level(topology, 34);

  return 0;
}