6154 const-ify segment ops structures


 280         vmu_cache_t     *vmu_cache;             /* Cached results */
 281         kthread_t       *vmu_calc_thread;       /* NULL, or thread running */
 282                                                 /* vmu_calculate() */
 283         uint_t          vmu_calc_flags;         /* Flags being used by */
 284                                                 /* currently running calc */
 285                                                 /* thread */
 286         uint_t          vmu_pending_flags;      /* Flags of vm_getusage() */
 287                                                 /* threads waiting for */
 288                                                 /* calc thread to finish */
 289         uint_t          vmu_pending_waiters;    /* Number of threads waiting */
 290                                                 /* for calc thread */
 291         vmu_bound_t     *vmu_free_bounds;
 292         vmu_object_t    *vmu_free_objects;
 293         vmu_entity_t    *vmu_free_entities;
 294         vmu_zone_t      *vmu_free_zones;
 295 } vmu_data_t;
 296
 297 extern struct as kas;
 298 extern proc_t *practive;
 299 extern zone_t *global_zone;
-300 extern struct seg_ops segvn_ops;
-301 extern struct seg_ops segspt_shmops;
+300 extern const struct seg_ops segvn_ops;
+301 extern const struct seg_ops segspt_shmops;
 302
 303 static vmu_data_t vmu_data;
 304 static kmem_cache_t *vmu_bound_cache;
 305 static kmem_cache_t *vmu_object_cache;
 306 
 307 /*
 308  * Comparison routine for AVL tree. We base our comparison on vmb_start.
 309  */
 310 static int
 311 bounds_cmp(const void *bnd1, const void *bnd2)
 312 {
 313         const vmu_bound_t *bound1 = bnd1;
 314         const vmu_bound_t *bound2 = bnd2;
 315 
 316         if (bound1->vmb_start == bound2->vmb_start) {
 317                 return (0);
 318         }
 319         if (bound1->vmb_start < bound2->vmb_start) {
 320                 return (-1);
 321         }
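
The comparator above follows the contract the illumos AVL code expects: it returns exactly -1, 0, or 1, ordering bounds by vmb_start, rather than relying on the subtraction idiom. Below is a minimal, self-contained sketch of how such a comparator is typically registered with an AVL tree; it assumes userland <sys/avl.h>/libavl is available and uses a hypothetical demo_bound_t as a stand-in for vmu_bound_t, so it is an illustration of the pattern, not the code under review.

        /*
         * Sketch only: hypothetical demo_bound_t, registered with the
         * illumos AVL API using a bounds_cmp-style comparator.
         */
        #include <sys/avl.h>
        #include <stddef.h>
        #include <stdint.h>
        #include <stdio.h>

        typedef struct demo_bound {
                uintptr_t       dmb_start;      /* start of range (sort key) */
                uintptr_t       dmb_end;        /* end of range */
                avl_node_t      dmb_node;       /* AVL linkage embedded in node */
        } demo_bound_t;

        /* Same contract as bounds_cmp above: return exactly -1, 0, or 1. */
        static int
        demo_bounds_cmp(const void *a, const void *b)
        {
                const demo_bound_t *b1 = a;
                const demo_bound_t *b2 = b;

                if (b1->dmb_start == b2->dmb_start)
                        return (0);
                return (b1->dmb_start < b2->dmb_start ? -1 : 1);
        }

        int
        main(void)
        {
                avl_tree_t tree;
                demo_bound_t b = { .dmb_start = 0x1000, .dmb_end = 0x1fff };

                /* Comparator, node size, and embedded-node offset are fixed here. */
                avl_create(&tree, demo_bounds_cmp, sizeof (demo_bound_t),
                    offsetof(demo_bound_t, dmb_node));
                avl_add(&tree, &b);
                (void) printf("%lu node(s) in tree\n",
                    (unsigned long)avl_numnodes(&tree));
                avl_remove(&tree, &b);
                avl_destroy(&tree);
                return (0);
        }

On illumos this should build in userland with -lavl. The comparator and the offset of the embedded avl_node_t are supplied once at avl_create() time, which is why bounds_cmp takes opaque const void * arguments.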




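The functional change in this review is the const qualifier added to the two extern declarations of the segment ops vectors, keeping them in agreement with the const-ified definitions elsewhere in the change. The sketch below illustrates the general pattern with a hypothetical demo_ops structure; it is not the real seg_ops and its members are invented for the example.

        /* Sketch only: const-ifying an ops vector of function pointers. */
        #include <stdio.h>

        struct demo_ops {
                int     (*op_map)(size_t len);
                void    (*op_unmap)(size_t len);
        };

        /* As it would appear in a shared header: the declaration carries const. */
        extern const struct demo_ops demo_ops_vec;

        static int
        demo_map(size_t len)
        {
                (void) printf("map %zu bytes\n", len);
                return (0);
        }

        static void
        demo_unmap(size_t len)
        {
                (void) printf("unmap %zu bytes\n", len);
        }

        /*
         * The definition is const as well, so the table of function pointers
         * can be placed in read-only storage; if the extern declaration and
         * the definition disagree on the qualifier, the compiler rejects it.
         */
        const struct demo_ops demo_ops_vec = {
                .op_map         = demo_map,
                .op_unmap       = demo_unmap,
        };

        int
        main(void)
        {
                return (demo_ops_vec.op_map(4096));
        }

Making the ops table const lets it live in read-only memory and turns stray stores through it into build-time or run-time failures, which is the point of const-ifying segvn_ops and segspt_shmops here.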