blib/lib/Data/Sync.pm

Criterion | Covered | Total | %
---|---|---|---|
statement | 125 | 539 | 23.1
branch | 36 | 174 | 20.6
condition | 3 | 36 | 8.3
subroutine | 18 | 49 | 36.7
pod | 14 | 40 | 35.0
total | 196 | 838 | 23.3
line | stmt | bran | cond | sub | pod | time | code |
---|---|---|---|---|---|---|---|
1 | ##################################################################### | ||||||
2 | # Data::Sync | ||||||
3 | # | ||||||
4 | # Classes to make development of metadirectory/datapump apps | ||||||
5 | # simple and fast | ||||||
6 | # | ||||||
7 | # C Colbourn 2005 | ||||||
8 | # | ||||||
9 | ##################################################################### | ||||||
10 | # Revision | ||||||
11 | # ======== | ||||||
12 | # | ||||||
13 | # 0.01 CColbourn New module | ||||||
14 | # | ||||||
15 | # 0.02 CColbourn Enhancements - see POD CHANGES | ||||||
16 | # | ||||||
17 | # 0.03 CColbourn Enhancements - see POD CHANGES | ||||||
18 | # | ||||||
19 | # 0.04 CColbourn Enhancements - see POD CHANGES | ||||||
20 | # | ||||||
21 | # 0.05 CColbourn Bugfix - see POD CHANGES | ||||||
22 | # | ||||||
23 | # 0.06 CColbourn Enhancements - see POD CHANGES | ||||||
24 | # | ||||||
25 | # 0.07 CColbourn Enhancements - see POD CHANGES | ||||||
26 | # | ||||||
27 | # 0.08 CColbourn Enhancements - see POD CHANGES | ||||||
28 | # | ||||||
29 | ##################################################################### | ||||||
30 | # Notes | ||||||
31 | # ===== | ||||||
32 | # | ||||||
33 | ##################################################################### | ||||||
34 | |||||||
35 | 3 | 3 | 22665 | use strict; | |||
3 | 7 | ||||||
3 | 145 | ||||||
36 | 3 | 3 | 17 | use warnings; | |||
3 | 7 | ||||||
3 | 34564 | ||||||
37 | |||||||
38 | |||||||
39 | package Data::Sync; | ||||||
40 | our $VERSION="0.08"; | ||||||
41 | |||||||
42 | ##################################################################### | ||||||
43 | # New - constructor of datasync object | ||||||
44 | # | ||||||
45 | # takes named parameters (log, configfile, jobname). | ||||||
46 | # returns blessed object | ||||||
47 | # | ||||||
48 | ##################################################################### | ||||||
49 | sub new | ||||||
50 | { | ||||||
51 | 3 | 3 | 0 | 165 | my $self = shift; | ||
52 | 3 | 6 | my %synchash; | ||||
53 | 3 | 9 | my %params = @_; | ||||
54 | |||||||
55 | # make the object first! | ||||||
56 | 3 | 10 | my $syncobject = bless \%synchash,$self; | ||||
57 | |||||||
58 | # define logging. If no log handle is given, use a coderef that just returns | ||||||
59 | 3 | 100 | 14 | if ($params{'log'}) | |||
60 | { | ||||||
61 | 1 | 6 | $syncobject->{'log'} = \&writelog; | ||||
62 | 1 | 4 | $syncobject->{'loghandle'} = $params{'log'}; | ||||
63 | } | ||||||
64 | else | ||||||
65 | { | ||||||
66 | 47 | 47 | 57 | $syncobject->{'log'} = sub{return} | |||
67 | 2 | 19 | } | ||||
68 | 3 | 50 | 12 | if ($params{'configfile'}) | |||
69 | { | ||||||
70 | 0 | 0 | my $return = $syncobject->load($params{'configfile'}); | ||||
71 | 0 | 0 | 0 | if (!$return) | |||
72 | { | ||||||
73 | 0 | 0 | $syncobject->{'log'}->($syncobject->{'loghandle'},"ERROR: Not a readable config file ".$params{'configfile'}); | ||||
74 | 0 | 0 | $syncobject->{'lasterror'} = "Not a readable config file ".$params{'configfile'}; | ||||
75 | return | ||||||
76 | 0 | 0 | } | ||||
77 | } | ||||||
78 | |||||||
79 | # assign the jobname (only needed if hashing or record mapping) | ||||||
80 | 3 | 50 | 18 | if (!$params{'jobname'}) | |||
81 | { | ||||||
82 | 3 | 9 | $params{'jobname'}="noname"; | ||||
83 | } | ||||||
84 | 3 | 9034 | $syncobject->{'name'} = $params{'jobname'}; | ||||
85 | |||||||
86 | # define the default multivalue record separator for concatenating LDAP multivalue attributes into a string | ||||||
87 | 3 | 8 | $syncobject->{'mvseparator'} = "|"; | ||||
88 | |||||||
89 | # put something to stdout for progress reporting. Convenience for debugging. Deliberately undocumented | ||||||
90 | 3 | 50 | 13 | if ($params{'progressoutputs'}) | |||
91 | { | ||||||
92 | 0 | 0 | 0 | 0 | if (!$params{'readprogress'}){$syncobject->{'readprogress'} = sub{print "R"}} | ||
0 | 0 | ||||||
0 | 0 | ||||||
93 | 0 | 0 | 0 | 0 | if (!$params{'transformprogress'}){$syncobject->{'transformprogress'} = sub {print "T"}} | ||
0 | 0 | ||||||
0 | 0 | ||||||
94 | 0 | 0 | 0 | 0 | if (!$params{'writeprogress'}){$syncobject->{'writeprogress'} = sub {print "W"}} | ||
0 | 0 | ||||||
0 | 0 | ||||||
95 | } | ||||||
96 | else | ||||||
97 | { | ||||||
98 | 3 | 50 | 0 | 17 | if(!$params{'readprogress'}){$syncobject->{'readprogress'} = sub {return}}; | ||
3 | 17 | ||||||
0 | 0 | ||||||
99 | 3 | 50 | 15 | 19 | if(!$params{'transformprogress'}){$syncobject->{'transformprogress'} = sub {return}}; | ||
3 | 11 | ||||||
15 | 184 | ||||||
100 | 3 | 50 | 0 | 13 | if(!$params{'writeprogress'}){$syncobject->{'writeprogress'} = sub {return}}; | ||
3 | 17 | ||||||
0 | 0 | ||||||
101 | } | ||||||
102 | |||||||
103 | # return the object | ||||||
104 | 3 | 15 | return $syncobject; | ||||
105 | |||||||
106 | } | ||||||
107 | |||||||
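A usage sketch of the constructor above (the STDERR log handle and the job name are illustrative, not taken from the module's tests):

    use Data::Sync;

    # log to STDERR; jobname only matters for the delta-hashing database name
    my $sync = Data::Sync->new(
        log     => \*STDERR,
        jobname => "staffsync",
    );
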
108 | ##################################################################### | ||||||
109 | # source | ||||||
110 | # | ||||||
111 | # defines the source data type & match criteria | ||||||
112 | # | ||||||
113 | # takes $handle,\%search criteria | ||||||
114 | # returns true on successful definition | ||||||
115 | ##################################################################### | ||||||
116 | sub source | ||||||
117 | { | ||||||
118 | 0 | 0 | 1 | 0 | my $self = shift; | ||
119 | 0 | 0 | my $handle = shift; | ||||
120 | 0 | 0 | my $criteriaref = shift; | ||||
121 | |||||||
122 | # do this regardless | ||||||
123 | 0 | 0 | $self->{'readhandle'}=$handle; | ||||
124 | |||||||
125 | 0 | 0 | 0 | if (!$criteriaref) | |||
126 | { | ||||||
127 | # this /should/ mean the config is coming from a file | ||||||
128 | # so return if the config has already been loaded (and | ||||||
129 | # if not, it won't hurt anything to continue) | ||||||
130 | 0 | 0 | 0 | if ($self->{'readcriteria'}) | |||
131 | { | ||||||
132 | 0 | 0 | return 1; | ||||
133 | } | ||||||
134 | } | ||||||
135 | |||||||
136 | # assign the criteria hash as properties | ||||||
137 | 0 | 0 | $self->{'readcriteria'} = $criteriaref; | ||||
138 | |||||||
139 | 0 | 0 | 0 | if (!$self->{'readcriteria'}->{'batchsize'}) | |||
140 | { | ||||||
141 | 0 | 0 | $self->{'readcriteria'}->{'batchsize'}=0; | ||||
142 | } | ||||||
143 | |||||||
144 | # Create coderef for LDAP | ||||||
145 | 0 | 0 | 0 | if ($handle =~/LDAP/) | |||
146 | { | ||||||
147 | 0 | 0 | $self->{'read'} = \&readldap; | ||||
148 | } | ||||||
149 | |||||||
150 | # everything else will be DBI/SQL | ||||||
151 | else | ||||||
152 | { | ||||||
153 | 0 | 0 | $self->{'read'} =\&readdbi | ||||
154 | } | ||||||
155 | |||||||
156 | 0 | 0 | 1; | ||||
157 | } | ||||||
158 | |||||||
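A hedged example of defining an LDAP source with the method above; the host, base and filter are hypothetical, and the criteria keys are the ones consumed by readldap below:

    use Net::LDAP;

    my $ldap = Net::LDAP->new("ldap.example.com") or die "connect failed";
    $ldap->bind;    # anonymous bind, purely for the sketch

    $sync->source($ldap,
        {
            filter    => "(objectclass=person)",
            base      => "ou=people,dc=example,dc=com",
            scope     => "sub",
            attrs     => [ "cn", "mail", "telephonenumber" ],
            batchsize => 0,    # 0 means read the whole result set in one pass
        });
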
159 | ######################################################### | ||||||
160 | # readldap - read from an ldap datasource | ||||||
161 | # | ||||||
162 | # takes object as param | ||||||
163 | # | ||||||
164 | # returns result handle | ||||||
165 | ######################################################### | ||||||
166 | sub readldap | ||||||
167 | { | ||||||
168 | |||||||
169 | 0 | 0 | 0 | 0 | my $self = shift; | ||
170 | 0 | 0 | my $result = $self->{'readhandle'}->search | ||||
171 | (filter=>$self->{'readcriteria'}->{'filter'}, | ||||||
172 | base=>$self->{'readcriteria'}->{'base'}, | ||||||
173 | scope=>$self->{'readcriteria'}->{'scope'}, | ||||||
174 | attrs=>$self->{'readcriteria'}->{'attrs'}, | ||||||
175 | controls=>[$self->{'readcriteria'}->{'controls'}]); | ||||||
176 | 0 | 0 | 0 | if ($result->code) | |||
177 | 0 | 0 | { | ||||
178 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"ERROR:".$result->error); | ||||
179 | 0 | 0 | return $result->error | ||||
180 | } | ||||||
181 | else {return $result} | ||||||
182 | } | ||||||
183 | |||||||
184 | ######################################################### | ||||||
185 | # readdbi - read from a dbi datasource | ||||||
186 | # | ||||||
187 | # takes object as param | ||||||
188 | # | ||||||
189 | # returns result handle | ||||||
190 | ######################################################### | ||||||
191 | sub readdbi | ||||||
192 | { | ||||||
193 | 0 | 0 | 0 | 0 | my $self = shift; | ||
194 | |||||||
195 | 0 | 0 | 0 | my $stm = $self->{'readhandle'}->prepare($self->{'readcriteria'}->{'select'}) or return; | |||
196 | 0 | 0 | my $result = $stm->execute; | ||||
197 | 0 | 0 | 0 | 0 | if ($result || $result eq "0E0"){return $stm} | ||
0 | 0 | ||||||
198 | else | ||||||
199 | { | ||||||
200 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"ERROR: Could not read from database"); | ||||
201 | 0 | 0 | $self->{'lasterror'}="ERROR: Could not read from database"; | ||||
202 | 0 | 0 | return undef; | ||||
203 | } | ||||||
204 | } | ||||||
205 | |||||||
206 | |||||||
207 | |||||||
208 | |||||||
209 | ######################################################### | ||||||
210 | # target - define the data target | ||||||
211 | # | ||||||
212 | # takes $handle, \%writecriteria | ||||||
213 | # | ||||||
214 | # returns 1 on success | ||||||
215 | # | ||||||
216 | ######################################################### | ||||||
217 | sub target | ||||||
218 | { | ||||||
219 | 0 | 0 | 1 | 0 | my $self = shift; | ||
220 | 0 | 0 | my $handle = shift; | ||||
221 | |||||||
222 | 0 | 0 | $self->{'writehandle'} = $handle; | ||||
223 | |||||||
224 | 0 | 0 | my $criteriaref = shift; | ||||
225 | 0 | 0 | 0 | if (!$criteriaref) | |||
226 | { | ||||||
227 | # this /may/ mean the config is coming from a file | ||||||
228 | # so return if the config has already been loaded (and | ||||||
229 | # if not, it may be an LDAP target, so continue) | ||||||
230 | 0 | 0 | 0 | 0 | if ($self->{'readcriteria'} && $handle!~/LDAP/) | ||
231 | { | ||||||
232 | 0 | 0 | return 1; | ||||
233 | } | ||||||
234 | } | ||||||
235 | else | ||||||
236 | { | ||||||
237 | 0 | 0 | $self->{'writecriteria'} = $criteriaref; | ||||
238 | } | ||||||
239 | |||||||
240 | # LDAP index is /always/ DN, but an index is needed for hashing. | ||||||
241 | 0 | 0 | 0 | if ($handle =~ /LDAP/){$criteriaref->{index} = "dn"} | |||
0 | 0 | ||||||
242 | |||||||
243 | |||||||
244 | # Checking for fubars | ||||||
245 | 0 | 0 | 0 | 0 | if ($criteriaref->{hashattributes} && !$criteriaref->{index}) | ||
246 | { | ||||||
247 | 0 | 0 | $self->{log}->($self->{loghandle},"Can't set a target with hashing and no index!"); | ||||
248 | 0 | 0 | $self->{'lasterror'} = "Can't set a target with hashing and no index!"; | ||||
249 | 0 | 0 | return; | ||||
250 | } | ||||||
251 | |||||||
252 | |||||||
253 | |||||||
254 | # create coderef to write to LDAP | ||||||
255 | 0 | 0 | 0 | if ($handle =~/LDAP/) | |||
256 | { | ||||||
257 | 0 | 0 | $self->{'write'} = \&writeldap; | ||||
258 | } | ||||||
259 | |||||||
260 | # write coderef for DBI | ||||||
261 | 0 | 0 | 0 | if ($handle =~/DBI/) | |||
262 | { | ||||||
263 | 0 | 0 | $self->{'write'} = \&writedbi; | ||||
264 | } | ||||||
265 | 0 | 0 | 1; | ||||
266 | } | ||||||
267 | |||||||
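A corresponding DBI target sketch (the table and column names are hypothetical); table and index are the keys writedbi uses, and hashattributes switches on the delta hashing handled by scanhashtable:

    use DBI;

    my $dbh = DBI->connect("DBI:SQLite:dbname=target.db", "", "")
        or die $DBI::errstr;

    $sync->target($dbh,
        {
            table          => "people",
            index          => "cn",
            hashattributes => [ "cn", "mail" ],    # optional delta detection
        });
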
268 | ######################################################## | ||||||
269 | # writedbi | ||||||
270 | # | ||||||
271 | # takes object as param | ||||||
272 | # | ||||||
273 | # return t/f | ||||||
274 | ######################################################## | ||||||
275 | sub writedbi | ||||||
276 | { | ||||||
277 | 0 | 0 | 0 | 0 | my $self = shift; | ||
278 | 0 | 0 | my $writedata = shift; | ||||
279 | |||||||
280 | |||||||
281 | 0 | 0 | for my $line (@$writedata) | ||||
282 | { | ||||||
283 | 0 | 0 | 0 | 0 | if ($line->{'Data::Sync::Action'} && $line->{'Data::Sync::Action'} eq "DELETE") | ||
284 | { | ||||||
285 | 0 | 0 | my $delete = "delete from ".$self->{writecriteria}->{table}. " where "; | ||||
286 | 0 | 0 | $delete .= $self->{writecriteria}->{index} ."=?"; | ||||
287 | |||||||
288 | 0 | 0 | my $stm = $self->{writehandle}->prepare($delete); | ||||
289 | |||||||
290 | 0 | 0 | my $result = $stm->execute($line->{$self->{writecriteria}->{index}}); | ||||
291 | 0 | 0 | 0 | 0 | if (!$result || $result eq "0E0") | ||
292 | { | ||||||
293 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"ERROR: Delete failed because ".$self->{'writehandle'}->errstr); | ||||
294 | 0 | 0 | $self->{'lasterror'}="ERROR: Delete failed because ".$self->{'writehandle'}->errstr; | ||||
295 | } | ||||||
296 | |||||||
297 | 0 | 0 | next; | ||||
298 | } | ||||||
299 | |||||||
300 | # Otherwise, the entry must be an update/add | ||||||
301 | 0 | 0 | my @keys = keys %$line; | ||||
302 | 0 | 0 | my @values = map $_,values %$line; | ||||
303 | |||||||
304 | 0 | 0 | 0 | if ($self->{writecriteria}->{index}) | |||
305 | { | ||||||
306 | 0 | 0 | my $update = "update ".$self->{'writecriteria'}->{'table'}. " set "; | ||||
307 | |||||||
308 | 0 | 0 | my @keys = keys %$line; | ||||
309 | 0 | 0 | my @values = map $_,values %$line; | ||||
310 | |||||||
311 | 0 | 0 | $update.=join "=?,",@keys; | ||||
312 | |||||||
313 | 0 | 0 | $update .="=? where "; | ||||
314 | 0 | 0 | $update .= $self->{'writecriteria'}->{'index'}; | ||||
315 | 0 | 0 | $update .="=?"; | ||||
316 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Updating $update, ".join ",",@values); | ||||
317 | |||||||
318 | 0 | 0 | my $stm = $self->{'writehandle'}->prepare($update); | ||||
319 | |||||||
320 | 0 | 0 | my $result = $stm->execute(@values,$line->{$self->{'writecriteria'}->{'index'}}); | ||||
321 | 0 | 0 | 0 | 0 | if (!$result || $result eq "0E0") | ||
322 | { | ||||||
323 | 0 | 0 | my $insert = "insert into ".$self->{'writecriteria'}->{'table'}." ("; | ||||
324 | 0 | 0 | $insert .= join ",",@keys; | ||||
325 | 0 | 0 | $insert .=") VALUES ("; | ||||
326 | 0 | 0 | $insert .=join ",",map { "?" } (0..scalar @values-1); | ||||
0 | 0 | ||||||
327 | 0 | 0 | $insert .=")"; | ||||
328 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Update failed, adding $insert, ".join ",",@values); | ||||
329 | 0 | 0 | $stm = $self->{'writehandle'}->prepare($insert); | ||||
330 | 0 | 0 | $result = $stm->execute(@values); | ||||
331 | } | ||||||
332 | 0 | 0 | 0 | 0 | if (!$result || $result eq "0E0") | ||
333 | { | ||||||
334 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"ERROR: Add failed because ".$self->{'writehandle'}->errstr); | ||||
335 | 0 | 0 | $self->{'lasterror'}="ERROR: Add failed because ".$self->{'writehandle'}->errstr; | ||||
336 | } | ||||||
337 | 0 | 0 | $self->{'writeprogress'}->($line->{$self->{'writecriteria'}->{'index'}}); | ||||
338 | } | ||||||
339 | else | ||||||
340 | { | ||||||
341 | 0 | 0 | my $insert = "insert into ".$self->{'writecriteria'}->{'table'}." ("; | ||||
342 | 0 | 0 | $insert .= join ",",@keys; | ||||
343 | 0 | 0 | $insert .=") VALUES ("; | ||||
344 | 0 | 0 | $insert .=join ",",map { "?" } (0..scalar @values-1); | ||||
0 | 0 | ||||||
345 | 0 | 0 | $insert .=")"; | ||||
346 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Adding $insert, ".join ",",@values); | ||||
347 | 0 | 0 | my $stm = $self->{'writehandle'}->prepare($insert); | ||||
348 | 0 | 0 | my $result = $stm->execute(@values); | ||||
349 | } | ||||||
350 | } | ||||||
351 | |||||||
352 | } | ||||||
353 | |||||||
354 | |||||||
355 | |||||||
356 | |||||||
357 | ######################################################## | ||||||
358 | # writeldap - write to an ldap server | ||||||
359 | # | ||||||
360 | # takes object as param | ||||||
361 | # | ||||||
362 | # returns t/f | ||||||
363 | ######################################################### | ||||||
364 | sub writeldap | ||||||
365 | { | ||||||
366 | 0 | 0 | 0 | 0 | my $self = shift; | ||
367 | 0 | 0 | my $writedata = shift; | ||||
368 | |||||||
369 | #if ($self->{'writecriteria'}->{'hashattributes'}) | ||||||
370 | #{ | ||||||
371 | # my $checkedrecordset = $self->scanhashtable($writedata); | ||||||
372 | # $writedata = $checkedrecordset; | ||||||
373 | #} | ||||||
374 | |||||||
375 | |||||||
376 | 0 | 0 | foreach my $line (@$writedata) | ||||
377 | { | ||||||
378 | 0 | 0 | my $dn = $line->{'dn'}; | ||||
379 | |||||||
380 | 0 | 0 | delete $line->{'dn'}; # don't want the dn included in the hash of attrs to write | ||||
381 | |||||||
382 | 0 | 0 | 0 | 0 | if ($line->{'Data::Sync::Action'} && $line->{'Data::Sync::Action'} eq "DELETE") | ||
383 | { | ||||||
384 | 0 | 0 | my $result = $self->{'writehandle'}->delete($dn); | ||||
385 | 0 | 0 | 0 | if ($result->code) | |||
386 | { | ||||||
387 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Delete of $dn failed because ".$result->error); | ||||
388 | 0 | 0 | $self->{lasterror} = "Delete of $dn failed because ".$result->error; | ||||
389 | } | ||||||
390 | else | ||||||
391 | { | ||||||
392 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Deleted $dn"); | ||||
393 | } | ||||||
394 | 0 | 0 | next; | ||||
395 | } | ||||||
396 | |||||||
397 | # otherwise it's a modify or add | ||||||
398 | |||||||
399 | |||||||
400 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Modifying $dn, values ".join ",",values %$line); | ||||
401 | |||||||
402 | # experimental problem solution 20060212 - sunone/fedora and (possibly) AD will not permit a 'replace' on the objectclass | ||||||
403 | # attribute in a modify - so we want to cover that off. Remove from the structure /unless/ a flag is set (document in ::Advanced) | ||||||
404 | #my $modline = $line; | ||||||
405 | #if (!$self->{writeobjectclass}) | ||||||
406 | #{ | ||||||
407 | # for (keys %$modline) | ||||||
408 | # { | ||||||
409 | # print "$_ --\n"; | ||||||
410 | # if (lc($_) eq "objectclass") | ||||||
411 | # { | ||||||
412 | # delete $$modline{$_}; | ||||||
413 | # } | ||||||
414 | # } | ||||||
415 | #} | ||||||
416 | |||||||
417 | |||||||
418 | |||||||
419 | 0 | 0 | my $result = | ||||
420 | $self->{'writehandle'}->modify | ||||||
421 | ( | ||||||
422 | dn=>$dn, | ||||||
423 | replace=>[%$line] | ||||||
424 | ); | ||||||
425 | |||||||
426 | |||||||
427 | 0 | 0 | 0 | if ($result->code) | |||
428 | { | ||||||
429 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Modify failed '".$result->error."', adding $dn, values ".join ",",values %$line); | ||||
430 | 0 | 0 | $result = | ||||
431 | $self->{'writehandle'}->add | ||||||
432 | ( | ||||||
433 | dn=>$dn, | ||||||
434 | attrs=>[%$line] | ||||||
435 | ); | ||||||
436 | |||||||
437 | } | ||||||
438 | |||||||
439 | 0 | 0 | 0 | if ($result->code) | |||
440 | { | ||||||
441 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"ERROR: ".$result->error); | ||||
442 | 0 | 0 | $self->{'lasterror'}="ERROR: Add failed :".$result->error; | ||||
443 | |||||||
444 | 0 | 0 | return undef; | ||||
445 | } | ||||||
446 | 0 | 0 | $self->{'writeprogress'}->("W"); | ||||
447 | } | ||||||
448 | 0 | 0 | return 1; | ||||
449 | } | ||||||
450 | |||||||
451 | |||||||
452 | |||||||
453 | |||||||
454 | ######################################################## | ||||||
455 | # sourceToAoH | ||||||
456 | # | ||||||
457 | # Convert data from source to an array of hashes | ||||||
458 | # so that there's a standard form to write data out | ||||||
459 | # | ||||||
460 | # takes data handle (LDAP result or DBI) | ||||||
461 | # | ||||||
462 | # returns ref to AoH | ||||||
463 | # | ||||||
464 | ######################################################## | ||||||
465 | sub sourceToAoH | ||||||
466 | { | ||||||
467 | 0 | 0 | 0 | 0 | my $self = shift; | ||
468 | 0 | 0 | my $handle = shift; | ||||
469 | |||||||
470 | 0 | 0 | my @records; | ||||
471 | 0 | 0 | my $counter=1; | ||||
472 | |||||||
473 | # Convert LDAP | ||||||
474 | 0 | 0 | 0 | if ($handle=~/LDAP/) | |||
475 | { | ||||||
476 | 0 | 0 | my $recordset = $self->ldapToAoH($handle); | ||||
477 | 0 | 0 | @records = @$recordset; | ||||
478 | } | ||||||
479 | |||||||
480 | # convert DBI | ||||||
481 | 0 | 0 | 0 | if ($handle=~/DBI/) | |||
482 | { | ||||||
483 | 0 | 0 | my $recordset = $self->dbiToAoH($handle); | ||||
484 | 0 | 0 | @records = @$recordset; | ||||
485 | } | ||||||
486 | |||||||
487 | |||||||
488 | # if it's an empty recordset return undef | ||||||
489 | 0 | 0 | 0 | if (scalar @records == 0){return} | |||
0 | 0 | ||||||
490 | |||||||
491 | # check against the hash records if defined and remove if the record has not changed. | ||||||
492 | #if ($self->{'readcriteria'}->{'hashattributes'}) | ||||||
493 | #{ | ||||||
494 | # my $checkedrecordset = $self->scanhashtable(\@records); | ||||||
495 | # @records = @$checkedrecordset; | ||||||
496 | # | ||||||
497 | #} | ||||||
498 | |||||||
499 | 0 | 0 | return \@records; | ||||
500 | |||||||
501 | } | ||||||
502 | |||||||
503 | ############################################################# | ||||||
504 | # ldapToAoH - convert the content of an LDAP handle to an AoH | ||||||
505 | # takes - ldap handle | ||||||
506 | # returns - AoH | ||||||
507 | ############################################################# | ||||||
508 | sub ldapToAoH | ||||||
509 | { | ||||||
510 | 0 | 0 | 0 | 0 | my $self = shift; | ||
511 | 0 | 0 | my $handle = shift; | ||||
512 | |||||||
513 | 0 | 0 | my @records; | ||||
514 | 0 | 0 | my $counter=1; | ||||
515 | |||||||
516 | |||||||
517 | 0 | 0 | 0 | if ($self->{'readcriteria'}->{'batchsize'} >0) | |||
518 | { | ||||||
519 | 0 | 0 | while ($counter<= $self->{'readcriteria'}->{'batchsize'}) | ||||
520 | { | ||||||
521 | 0 | 0 | my $entry=$handle->shift_entry; | ||||
522 | 0 | 0 | 0 | if (!$entry){last} | |||
0 | 0 | ||||||
523 | 0 | 0 | my %record; | ||||
524 | 0 | 0 | for my $attrib ($entry->attributes) | ||||
525 | { | ||||||
526 | 0 | 0 | $record{$attrib} = $entry->get_value($attrib); | ||||
527 | } | ||||||
528 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Read ".$entry->dn." from the directory"); | ||||
529 | 0 | 0 | push @records,\%record; | ||||
530 | 0 | 0 | $counter++; | ||||
531 | 0 | 0 | $self->{'readprogress'}->($entry->dn); | ||||
532 | } | ||||||
533 | } | ||||||
534 | else | ||||||
535 | { | ||||||
536 | 0 | 0 | while (my $entry=$handle->shift_entry) | ||||
537 | { | ||||||
538 | 0 | 0 | my %record; | ||||
539 | 0 | 0 | for my $attrib ($entry->attributes) | ||||
540 | { | ||||||
541 | 0 | 0 | $record{$attrib} = $entry->get_value($attrib); | ||||
542 | } | ||||||
543 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Read ".$entry->dn." from the directory"); | ||||
544 | 0 | 0 | push @records,\%record; | ||||
545 | 0 | 0 | $counter++; | ||||
546 | 0 | 0 | $self->{'readprogress'}->($entry->dn); | ||||
547 | } | ||||||
548 | } | ||||||
549 | |||||||
550 | |||||||
551 | |||||||
552 | |||||||
553 | |||||||
554 | 0 | 0 | return \@records; | ||||
555 | } | ||||||
556 | |||||||
557 | ############################################################# | ||||||
558 | # dbiToAoH - converts a db handle to an AoH | ||||||
559 | # takes - DB handle | ||||||
560 | # returns - AoH | ||||||
561 | ############################################################# | ||||||
562 | sub dbiToAoH | ||||||
563 | { | ||||||
564 | 0 | 0 | 0 | 0 | my $self = shift; | ||
565 | 0 | 0 | my $handle = shift; | ||||
566 | |||||||
567 | 0 | 0 | my @records; | ||||
568 | 0 | 0 | my $counter=1; | ||||
569 | 0 | 0 | my $recordcounter=0; | ||||
570 | # this separation looks a bit strange, but combining into a single loop resulted in a segfault from DBI that I chased | ||||||
571 | # for HOURS! resolve at a later date. | ||||||
572 | 0 | 0 | 0 | if ($self->{'readcriteria'}->{'batchsize'} >0) | |||
573 | { | ||||||
574 | 0 | 0 | while ($counter <= $self->{'readcriteria'}->{'batchsize'}) | ||||
575 | { | ||||||
576 | 0 | 0 | my $entry = $handle->fetchrow_hashref; | ||||
577 | 0 | 0 | 0 | if (!$entry){last} | |||
0 | 0 | ||||||
578 | 0 | 0 | my %record; | ||||
579 | 0 | 0 | for my $attrib (keys %$entry) | ||||
580 | { | ||||||
581 | 0 | 0 | $record{$attrib} = $entry->{$attrib} | ||||
582 | } | ||||||
583 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Read entry ".++$recordcounter." from the database"); | ||||
584 | 0 | 0 | push @records,\%record; | ||||
585 | 0 | 0 | $counter++; | ||||
586 | |||||||
587 | 0 | 0 | $self->{'readprogress'}->(); | ||||
588 | } | ||||||
589 | } | ||||||
590 | else | ||||||
591 | { | ||||||
592 | 0 | 0 | while (my $entry = $handle->fetchrow_hashref) | ||||
593 | { | ||||||
594 | 0 | 0 | my %record; | ||||
595 | 0 | 0 | for my $attrib (keys %$entry) | ||||
596 | { | ||||||
597 | 0 | 0 | $record{$attrib} = $entry->{$attrib} | ||||
598 | } | ||||||
599 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Read entry ".++$recordcounter." from the database"); | ||||
600 | 0 | 0 | push @records,\%record; | ||||
601 | 0 | 0 | $self->{'readprogress'}->(); | ||||
602 | } | ||||||
603 | } | ||||||
604 | |||||||
605 | 0 | 0 | return \@records; | ||||
606 | } | ||||||
607 | |||||||
608 | ############################################################# | ||||||
609 | # hashrecord - take a record as a hash, and return the MD5 | ||||||
610 | # hash | ||||||
611 | # | ||||||
612 | # takes hashref of record, arrayref of attribs to hash | ||||||
613 | # | ||||||
614 | ############################################################# | ||||||
615 | sub hashrecord | ||||||
616 | { | ||||||
617 | 0 | 0 | 0 | 0 | require Digest::MD5; | ||
618 | 0 | 0 | my $self = shift; | ||||
619 | 0 | 0 | my $record = shift; | ||||
620 | 0 | 0 | my $attribs = shift; | ||||
621 | |||||||
622 | # make a hash of the current record | ||||||
623 | 0 | 0 | my $attribstring; | ||||
624 | 0 | 0 | for (@$attribs) | ||||
625 | { | ||||||
626 | 0 | 0 | 0 | if (!ref($_)) | |||
627 | { | ||||||
628 | 0 | 0 | $attribstring .= $$record{$_}; | ||||
629 | } | ||||||
630 | } | ||||||
631 | |||||||
632 | 0 | 0 | my $newhash = Digest::MD5->new; | ||||
633 | 0 | 0 | $newhash->add($attribstring); | ||||
634 | |||||||
635 | 0 | 0 | return $newhash->hexdigest; | ||||
636 | } | ||||||
637 | |||||||
638 | ############################################################# | ||||||
639 | # scanhashtable - run through a record set checking records | ||||||
640 | # against the stored hash table | ||||||
641 | # takes - a record set (AoH) | ||||||
642 | # returns - a record set minus unchanged records. | ||||||
643 | ############################################################# | ||||||
644 | sub scanhashtable | ||||||
645 | { | ||||||
646 | 0 | 0 | 0 | 0 | my $self = shift; | ||
647 | 0 | 0 | my $recordset = shift; | ||||
648 | |||||||
649 | 0 | 0 | require DBI; | ||||
650 | 0 | 0 | require Digest::MD5; | ||||
651 | |||||||
652 | 0 | 0 | my @records = @$recordset; | ||||
653 | 0 | 0 | my @hashcheckedrecords; | ||||
654 | |||||||
655 | 0 | 0 | 0 | my $hashdb = DBI->connect("DBI:SQLite:dbname=".$self->{'name'},"","") or die $!; | |||
656 | |||||||
657 | # check that the hash table for this database exists - if not, create it | ||||||
658 | 0 | 0 | my $stm = $hashdb->prepare("select * from hashtable"); | ||||
659 | |||||||
660 | 0 | 0 | 0 | if (!$stm) | |||
661 | { | ||||||
662 | 0 | 0 | $stm = $hashdb->prepare ("create table hashtable (targetkey CHAR(100),attribhash CHAR(32), status CHAR(1))"); | ||||
663 | 0 | 0 | $stm->execute; | ||||
664 | } | ||||||
665 | else | ||||||
666 | { | ||||||
667 | # tombstone previously deleted entries | ||||||
668 | 0 | 0 | my $tmbstm = $hashdb->prepare("update hashtable set status=? where status=?"); | ||||
669 | 0 | 0 | my $result = $tmbstm->execute("T","D"); | ||||
670 | 0 | 0 | 0 | 0 | if (!$result || $result eq "0E0") | ||
671 | { | ||||||
672 | 0 | 0 | $self->{log}->($self->{loghandle},"Can't update status of previously deleted entries - expect write errors"); | ||||
673 | } | ||||||
674 | # mark all entries deleted, any that still exist will be marked update/exists | ||||||
675 | 0 | 0 | my $delstm = $hashdb->prepare("update hashtable set status=? where status != ?"); | ||||
676 | 0 | 0 | $result = $delstm->execute("D","T"); | ||||
677 | 0 | 0 | 0 | 0 | if (!$result || $result eq "0E0") | ||
678 | { | ||||||
679 | 0 | 0 | $self->{log}->($self->{loghandle},"Can't update status of all entries - deltas may fail"); | ||||
680 | } | ||||||
681 | } | ||||||
682 | |||||||
683 | 0 | 0 | my $getstm = $hashdb->prepare ("select attribhash from hashtable where targetkey=?"); | ||||
684 | 0 | 0 | my $putstm = $hashdb->prepare("insert into hashtable (targetkey,attribhash) VALUES (?,?)"); | ||||
685 | 0 | 0 | my $updstm = $hashdb->prepare("update hashtable set attribhash=? where targetkey=?"); | ||||
686 | 0 | 0 | my $statusstm = $hashdb->prepare("update hashtable set status=? where targetkey=?"); | ||||
687 | |||||||
688 | 0 | 0 | for my $record (@records) | ||||
689 | { | ||||||
690 | 0 | 0 | $getstm->execute(${$record}{$self->{'writecriteria'}->{'index'}}); | ||||
0 | 0 | ||||||
691 | 0 | 0 | my $oldhash = $getstm->fetchrow; | ||||
692 | |||||||
693 | 0 | 0 | my $newhash = $self->hashrecord($record,\@{$self->{writecriteria}->{hashattributes}}); | ||||
0 | 0 | ||||||
694 | |||||||
695 | 0 | 0 | 0 | if (!$oldhash) | |||
0 | |||||||
696 | { | ||||||
697 | 0 | 0 | $putstm->execute(${$record}{$self->{'writecriteria'}->{'index'}},$newhash); | ||||
0 | 0 | ||||||
698 | 0 | 0 | push @hashcheckedrecords,$record; | ||||
699 | 0 | 0 | $statusstm->execute("N",${$record}{$self->{writecriteria}->{index}}); | ||||
0 | 0 | ||||||
700 | } | ||||||
701 | elsif($oldhash ne $newhash) | ||||||
702 | { | ||||||
703 | 0 | 0 | $updstm->execute($newhash,${$record}{$self->{'writecriteria'}->{'index'}}); | ||||
0 | 0 | ||||||
704 | 0 | 0 | push @hashcheckedrecords,$record; | ||||
705 | 0 | 0 | $statusstm->execute("U",${$record}{$self->{writecriteria}->{index}}); | ||||
0 | 0 | ||||||
706 | } | ||||||
707 | else | ||||||
708 | { | ||||||
709 | 0 | 0 | $statusstm->execute("E",${$record}{$self->{writecriteria}->{index}}); | ||||
0 | 0 | ||||||
710 | } | ||||||
711 | } | ||||||
712 | 0 | 0 | return \@hashcheckedrecords; | ||||
713 | } | ||||||
714 | |||||||
715 | ############################################################# | ||||||
716 | # getdeletes - get a list of the deleted records | ||||||
717 | # takes - null | ||||||
718 | # returns - arrayref of deleted entries | ||||||
719 | ############################################################# | ||||||
720 | sub getdeletes | ||||||
721 | { | ||||||
722 | 0 | 0 | 0 | 0 | my $self = shift; | ||
723 | 0 | 0 | 0 | my $hashdb = DBI->connect("DBI:SQLite:dbname=".$self->{'name'},"","") or die $!; | |||
724 | |||||||
725 | |||||||
726 | 0 | 0 | my $stm = $hashdb->prepare("select targetkey from hashtable where status=?"); | ||||
727 | 0 | 0 | my $result = $stm->execute("D"); | ||||
728 | |||||||
729 | |||||||
730 | 0 | 0 | 0 | if (!$result) | |||
731 | { | ||||||
732 | return | ||||||
733 | 0 | 0 | } | ||||
734 | else | ||||||
735 | { | ||||||
736 | #return $stm->fetchall_arrayref(); | ||||||
737 | # fits into the existing code better to return a hash of index=>value | ||||||
738 | 0 | 0 | my @deleteds; | ||||
739 | 0 | 0 | while (my $entry = $stm->fetchrow_array) | ||||
740 | { | ||||||
741 | 0 | 0 | push @deleteds,{$self->{writecriteria}->{index} => $entry}; | ||||
742 | } | ||||||
743 | 0 | 0 | return \@deleteds; | ||||
744 | } | ||||||
745 | } | ||||||
746 | |||||||
747 | ############################################################# | ||||||
748 | # deletes - define the behaviour for deleted records | ||||||
749 | # | ||||||
750 | # takes - hash of named params | ||||||
751 | # returns - success/fail | ||||||
752 | ############################################################# | ||||||
753 | sub deletes | ||||||
754 | { | ||||||
755 | 0 | 0 | 1 | 0 | my $self = shift; | ||
756 | 0 | 0 | my $params; | ||||
757 | 0 | 0 | 0 | if (!@_){return} # don't set anything if blank | |||
0 | 0 | ||||||
758 | 0 | 0 | 0 | 0 | if (scalar @_ == 1 && $_[0] =~/delete/i) | ||
759 | { | ||||||
760 | 0 | 0 | $params = "delete"; | ||||
761 | } | ||||||
762 | else | ||||||
763 | { | ||||||
764 | 0 | 0 | my %paramshash = @_; | ||||
765 | 0 | 0 | $params = \%paramshash; | ||||
766 | } | ||||||
767 | |||||||
768 | 0 | 0 | $self->{deleteactions} = $params; | ||||
769 | |||||||
770 | 0 | 0 | return 1; | ||||
771 | } | ||||||
772 | |||||||
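Two hedged calls to the method above: pass the literal string "delete" to remove deleted records from the target, or a hash of attribute=>value pairs to stamp onto deleted records instead (the status attribute is invented for the example):

    # physically remove deleted source records from the target...
    $sync->deletes("delete");

    # ...or mark them rather than removing them
    $sync->deletes(status => "gone");
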
773 | |||||||
774 | ############################################################# | ||||||
775 | # Run - read the data, transform it, then write it. | ||||||
776 | # | ||||||
777 | # takes no parameters (apart from object) | ||||||
778 | # returns success or fail. | ||||||
779 | # | ||||||
780 | ############################################################# | ||||||
781 | sub run | ||||||
782 | { | ||||||
783 | 0 | 0 | 1 | 0 | my $self = shift; | ||
784 | |||||||
785 | # fetch from source | ||||||
786 | 0 | 0 | my $receivedata = $self->{'read'}->($self); | ||||
787 | |||||||
788 | # If we don't get anything back, return undef | ||||||
789 | 0 | 0 | 0 | if (!$receivedata){return} | |||
0 | 0 | ||||||
790 | |||||||
791 | 0 | 0 | my $result; | ||||
792 | |||||||
793 | 0 | 0 | my $AoHdata=[]; | ||||
794 | 0 | 0 | while ($AoHdata) | ||||
795 | { | ||||||
796 | # convert to an AoH | ||||||
797 | 0 | 0 | my $AoHdata = $self->sourceToAoH($receivedata); | ||||
798 | 0 | 0 | 0 | if (!$AoHdata){last} | |||
0 | 0 | ||||||
799 | |||||||
800 | # construct templated attributes | ||||||
801 | 0 | 0 | $AoHdata = $self->makebuiltattributes($AoHdata); | ||||
802 | |||||||
803 | # remap attrib names to target names | ||||||
804 | 0 | 0 | $AoHdata = $self->remap($AoHdata); | ||||
805 | |||||||
806 | # perform data transforms | ||||||
807 | 0 | 0 | $AoHdata = $self->runtransform($AoHdata); | ||||
808 | |||||||
809 | 0 | 0 | 0 | if ($self->{validation}) | |||
810 | { | ||||||
811 | 0 | 0 | my $result = $self->validate($AoHdata); | ||||
812 | 0 | 0 | 0 | if (!$result) | |||
813 | { | ||||||
814 | 0 | 0 | $self->{lasterror} = "ERROR: Validation failed"; | ||||
815 | 0 | 0 | return undef; | ||||
816 | } | ||||||
817 | |||||||
818 | } | ||||||
819 | |||||||
820 | # check against hashtable | ||||||
821 | 0 | 0 | 0 | if ($self->{writecriteria}->{hashattributes}) | |||
822 | { | ||||||
823 | 0 | 0 | $AoHdata = $self->scanhashtable($AoHdata); | ||||
824 | } | ||||||
825 | |||||||
826 | # handle deletions here - it MUST be after scanhashtable - note, it might be handy to put this in a separate function for ease of overloading. | ||||||
827 | 0 | 0 | 0 | if ($self->{deleteactions}) | |||
828 | { | ||||||
829 | 0 | 0 | my $deletes = $self->getdeletes(); | ||||
830 | |||||||
831 | 0 | 0 | 0 | if ($deletes) | |||
832 | { | ||||||
833 | 0 | 0 | 0 | if ($self->{deleteactions} eq "delete") | |||
834 | { | ||||||
835 | 0 | 0 | for my $record (@$deletes) | ||||
836 | { | ||||||
837 | # It's very unlikely that 'Data::Sync::Action' will collide with a true field name | ||||||
838 | 0 | 0 | $record->{"Data::Sync::Action"} = "DELETE"; | ||||
839 | } | ||||||
840 | } | ||||||
841 | else # set up an update | ||||||
842 | { | ||||||
843 | #$deletes = $self->runtransform($deletes,$self->{deleteactions}); | ||||||
844 | 0 | 0 | for my $entry (@$deletes) | ||||
845 | { | ||||||
846 | 0 | 0 | %$entry = (%$entry,%{$self->{deleteactions}}); | ||||
0 | 0 | ||||||
847 | } | ||||||
848 | |||||||
849 | } | ||||||
850 | # need to build attributes, remap and run transforms on deletes (for building DNs & field mappings) | ||||||
851 | #$deletes = $self->makebuiltattributes($deletes); | ||||||
852 | #$deletes = $self->remap($deletes); | ||||||
853 | #$deletes = $self->runtransform($deletes); | ||||||
854 | 0 | 0 | $AoHdata = [@$AoHdata,@$deletes]; | ||||
855 | } | ||||||
856 | } | ||||||
857 | |||||||
858 | # write to target | ||||||
859 | 0 | 0 | $result = $self->{'write'}->($self,$AoHdata); | ||||
860 | |||||||
861 | # jump out if not in batch mode | ||||||
862 | 0 | 0 | 0 | if ($self->{'readcriteria'}->{'batchsize'} == 0){last} | |||
0 | 0 | ||||||
863 | |||||||
864 | } | ||||||
865 | |||||||
866 | #set the timestamp | ||||||
867 | 0 | 0 | my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday, $isdst) = localtime(time); | ||||
868 | 0 | 0 | $mon+=1; | ||||
869 | 0 | 0 | $year+=1900; | ||||
870 | 0 | 0 | $self->{'lastruntime'} = sprintf("%4d%02d%02d%02d%02d%02d",$year,$mon,$mday,$hour,$min,$sec); | ||||
871 | |||||||
872 | 0 | 0 | return $result; | ||||
873 | } | ||||||
874 | |||||||
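Once source, target and any mappings/transforms are defined, a run looks roughly like this (a sketch; error() and lastruntime() are the accessors defined further down):

    if (!$sync->run())
    {
        warn "run reported a problem: " . ($sync->error() || "unknown error");
    }
    print "last run: ", $sync->lastruntime(), "\n";
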
875 | ############################################################# | ||||||
876 | # mappings - define mappings from source to target | ||||||
877 | # | ||||||
878 | # takes hash of sourceattrib=>targetattrib | ||||||
879 | # | ||||||
880 | # returns success or fail | ||||||
881 | # | ||||||
882 | ############################################################# | ||||||
883 | sub mappings | ||||||
884 | { | ||||||
885 | 0 | 0 | 1 | 0 | my $self = shift; | ||
886 | 0 | 0 | my %params = @_; | ||||
887 | |||||||
888 | 0 | 0 | $self->{'map'} = \%params; | ||||
889 | |||||||
890 | 0 | 0 | return 1; | ||||
891 | } | ||||||
892 | |||||||
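A mappings sketch, renaming source attribute names to target names (the names themselves are hypothetical); anything not listed keeps its source name, as remap below shows:

    $sync->mappings(
        cn   => "FULLNAME",
        mail => "EMAIL",
    );
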
893 | ############################################################## | ||||||
894 | # remap - rename source keys in data to target keys | ||||||
895 | # | ||||||
896 | # takes data structure in AoH form | ||||||
897 | # returns data structure in AoH form | ||||||
898 | # | ||||||
899 | ############################################################## | ||||||
900 | sub remap | ||||||
901 | { | ||||||
902 | 0 | 0 | 0 | 0 | my $self = shift; | ||
903 | 0 | 0 | my $data = shift; | ||||
904 | 0 | 0 | my @newdata; | ||||
905 | |||||||
906 | 0 | 0 | for my $line (@$data) | ||||
907 | { | ||||||
908 | 0 | 0 | my %record; | ||||
909 | 0 | 0 | for my $attrib (keys %$line) | ||||
910 | { | ||||||
911 | # retain unchanged name if nothing in map | ||||||
912 | |||||||
913 | 0 | 0 | 0 | if ($self->{'map'}->{$attrib}) | |||
914 | { | ||||||
915 | 0 | 0 | $record{$self->{'map'}->{$attrib}} = $$line{$attrib}; | ||||
916 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Remapped ".$attrib." to ".$self->{'map'}->{$attrib}); | ||||
917 | } | ||||||
918 | else | ||||||
919 | { | ||||||
920 | 0 | 0 | $record{$attrib}=$$line{$attrib} | ||||
921 | } | ||||||
922 | } | ||||||
923 | |||||||
924 | 0 | 0 | push @newdata,\%record; | ||||
925 | } | ||||||
926 | |||||||
927 | 0 | 0 | return \@newdata; | ||||
928 | } | ||||||
929 | |||||||
930 | |||||||
931 | ############################################################## | ||||||
932 | # transforms - define transformations of data | ||||||
933 | # | ||||||
934 | # takes hash of params | ||||||
935 | # returns success or fail | ||||||
936 | # | ||||||
937 | ############################################################## | ||||||
938 | sub transforms | ||||||
939 | { | ||||||
940 | 3 | 3 | 1 | 108 | my $self = shift; | ||
941 | |||||||
942 | 3 | 20 | $self->{transformations} = $self->maketrfunctions(@_); | ||||
943 | |||||||
944 | 3 | 8 | return 1; | ||||
945 | } | ||||||
946 | |||||||
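A transforms sketch covering the three value styles maketrfunctions accepts below - a substitution string, the name of a built-in transformation function, and a coderef - plus the "concatenate" special case for multi-valued attributes; the attribute names are hypothetical:

    $sync->transforms(
        telephonenumber => 's/^\+44/0/',        # regex string, eval'd into a sub
        sn              => "uppercase",         # built-in function name (see end of file)
        mail            => sub { lc shift },    # arbitrary coderef
        memberof        => "concatenate",       # join multivalues with mvseparator
    );
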
947 | ############################################################## | ||||||
948 | # maketrfunctions - convert the various transform functions | ||||||
949 | # into coderefs etc | ||||||
950 | # | ||||||
951 | # takes hash of params | ||||||
952 | # returns hash of coderefs etc | ||||||
953 | ############################################################## | ||||||
954 | sub maketrfunctions | ||||||
955 | { | ||||||
956 | 3 | 3 | 0 | 5 | my $self = shift; | ||
957 | |||||||
958 | 3 | 23 | my %params=@_; | ||||
959 | |||||||
960 | # params are attrib=>regexstring or attrib=>coderef | ||||||
961 | |||||||
962 | # if param is a regex, transform to a coderef | ||||||
963 | 3 | 20 | for (keys %params) | ||||
964 | { | ||||||
965 | # capture the concatenate special case | ||||||
966 | 19 | 100 | 116 | if ($params{$_} =~/^concatenate$/){} | |||
100 | |||||||
100 | |||||||
967 | # otherwise turn a function name into a coderef | ||||||
968 | elsif ($params{$_} =~/^\w+$/) | ||||||
969 | { | ||||||
970 | 9 | 13 | $params{$_} = \&{$params{$_}}; | ||||
9 | 37 | ||||||
971 | } | ||||||
972 | elsif ($params{$_} !~/CODE/) | ||||||
973 | { | ||||||
974 | 5 | 837 | $params{$_}=eval "sub { #!#$_#!# | ||||
975 | my \$data=shift; | ||||||
976 | \$data =~".$params{$_}."; | ||||||
977 | return \$data;}"; | ||||||
978 | } | ||||||
979 | } | ||||||
980 | |||||||
981 | 3 | 163 | return \%params; | ||||
982 | |||||||
983 | |||||||
984 | } | ||||||
985 | |||||||
986 | ############################################################## | ||||||
987 | # runtransform - perform regexes and data transforms on | ||||||
988 | # the data | ||||||
989 | # | ||||||
990 | # takes AoH | ||||||
991 | # returns AoH | ||||||
992 | # | ||||||
993 | ############################################################## | ||||||
994 | sub runtransform | ||||||
995 | { | ||||||
996 | |||||||
997 | 3 | 3 | 0 | 16 | my $self = shift; | ||
998 | 3 | 6 | my $inData = shift; | ||||
999 | 3 | 8 | my $transformations = shift; | ||||
1000 | 3 | 50 | 11 | if (!$transformations){$transformations = $self->{transformations}} | |||
3 | 8 | ||||||
1001 | |||||||
1002 | 3 | 6 | my @outData; | ||||
1003 | |||||||
1004 | 3 | 7 | for my $line (@$inData) | ||||
1005 | { | ||||||
1006 | 15 | 15 | my %record; | ||||
1007 | 15 | 40 | for my $attrib (keys %$line) | ||||
1008 | { | ||||||
1009 | # only convert if there is a transform for this | ||||||
1010 | 36 | 100 | 71 | if ($transformations->{$attrib}) | |||
1011 | { | ||||||
1012 | 34 | 44 | my $before = $$line{$attrib}; | ||||
1013 | # handle possible multi valued attribs | ||||||
1014 | 34 | 80 | $record{$attrib} = $self->recursiveTransform($$line{$attrib},$transformations->{$attrib}); | ||||
1015 | } | ||||||
1016 | else | ||||||
1017 | { | ||||||
1018 | 2 | 6 | $record{$attrib} = $$line{$attrib} | ||||
1019 | } | ||||||
1020 | } | ||||||
1021 | 15 | 31 | push @outData,\%record; | ||||
1022 | 15 | 34 | $self->{'transformprogress'}->(); | ||||
1023 | } | ||||||
1024 | |||||||
1025 | 3 | 17 | return \@outData; | ||||
1026 | |||||||
1027 | } | ||||||
1028 | |||||||
1029 | ############################################################ | ||||||
1030 | # recursiveTransform - recursively transform data | ||||||
1031 | # | ||||||
1032 | # takes attrib,$transformation | ||||||
1033 | # | ||||||
1034 | # returns transformed attrib | ||||||
1035 | # | ||||||
1036 | ############################################################ | ||||||
1037 | sub recursiveTransform | ||||||
1038 | { | ||||||
1039 | 58 | 58 | 0 | 74 | my $self = shift; | ||
1040 | 58 | 58 | my $data = shift; | ||||
1041 | 58 | 61 | my $transformation = shift; | ||||
1042 | |||||||
1043 | |||||||
1044 | # if the transformation is to join values together | ||||||
1045 | 58 | 100 | 100 | 458 | if ($data =~/ARRAY/ && $transformation eq "concatenate") | ||
100 | |||||||
50 | |||||||
50 | |||||||
1046 | { | ||||||
1047 | 2 | 9 | my $string = join $self->{'mvseparator'},@$data; | ||||
1048 | 2 | 4 | $data = $string; | ||||
1049 | } | ||||||
1050 | # otherwise act on each instance | ||||||
1051 | elsif ($data =~/ARRAY/) | ||||||
1052 | { | ||||||
1053 | 12 | 29 | for (0..scalar @$data -1) | ||||
1054 | { | ||||||
1055 | 24 | 66 | $$data[$_] = $self->recursiveTransform($$data[$_],$transformation); | ||||
1056 | } | ||||||
1057 | } | ||||||
1058 | |||||||
1059 | elsif ($data =~/HASH/) | ||||||
1060 | { | ||||||
1061 | 0 | 0 | for my $inst (keys %$data) | ||||
1062 | { | ||||||
1063 | 0 | 0 | $$data{$inst} = $self->recursiveTransform($$data{$inst},$transformation); | ||||
1064 | } | ||||||
1065 | } | ||||||
1066 | |||||||
1067 | elsif ($transformation =~/CODE/) | ||||||
1068 | { | ||||||
1069 | 44 | 52 | my $before = $data; | ||||
1070 | 44 | 512 | $data = $transformation->($data); | ||||
1071 | 44 | 289 | $self->{'log'}->($self->{'loghandle'},"Transformed $before to ".$data); | ||||
1072 | } | ||||||
1073 | |||||||
1074 | 58 | 608 | return $data; | ||||
1075 | } | ||||||
1076 | |||||||
1077 | ########################################################## | ||||||
1078 | # buildattributes - fake up attributes from source data | ||||||
1079 | # | ||||||
1080 | # takes attribname=>'template string', where each %NAME% | ||||||
1081 | # placeholder is replaced with that source attribute's value | ||||||
1082 | # Note: this runs before runtransform, so data can be | ||||||
1083 | # templated here, then transformed in runtransform for more | ||||||
1084 | # sophisticated constructions | ||||||
1085 | # | ||||||
1086 | # returns success/fail | ||||||
1087 | # | ||||||
1088 | ########################################################### | ||||||
1089 | sub buildattributes | ||||||
1090 | { | ||||||
1091 | 0 | 0 | 1 | 0 | my $self = shift; | ||
1092 | 0 | 0 | my %attribs = @_; | ||||
1093 | |||||||
1094 | 0 | 0 | $self->{'buildattribs'} = \%attribs; | ||||
1095 | |||||||
1096 | 0 | 0 | return 1; | ||||
1097 | } | ||||||
1098 | |||||||
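A buildattributes sketch (the DN template is invented); each %name% placeholder is substituted with that source attribute's value by makebuiltattributes below:

    $sync->buildattributes(
        dn => "cn=%cn%,ou=people,dc=example,dc=com",
    );
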
1099 | ############################################################# | ||||||
1100 | # makebuiltattributes - add built attributes to the data map | ||||||
1101 | # | ||||||
1102 | # takes AoH | ||||||
1103 | # returns AoH | ||||||
1104 | # | ||||||
1105 | ############################################################# | ||||||
1106 | sub makebuiltattributes | ||||||
1107 | { | ||||||
1108 | 0 | 0 | 0 | 0 | my $self = shift; | ||
1109 | |||||||
1110 | 0 | 0 | my $indataref = shift; | ||||
1111 | 0 | 0 | my @inData = @$indataref; | ||||
1112 | |||||||
1113 | 0 | 0 | for my $line (@inData) | ||||
1114 | { | ||||||
1115 | 0 | 0 | for my $newattrib (keys %{$self->{'buildattribs'}}) | ||||
0 | 0 | ||||||
1116 | { | ||||||
1117 | 0 | 0 | $$line{$newattrib} = $self->{'buildattribs'}->{$newattrib}; | ||||
1118 | # s/// the template | ||||||
1119 | 0 | 0 | $$line{$newattrib} =~s/%(.*?)%/$$line{$1}/g; | ||||
1120 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"Created attribute $newattrib containing ".$$line{$newattrib}); | ||||
1121 | } | ||||||
1122 | } | ||||||
1123 | |||||||
1124 | 0 | 0 | return \@inData; | ||||
1125 | } | ||||||
1126 | |||||||
1127 | ###################################################### | ||||||
1128 | # writelog - write logging information | ||||||
1129 | # | ||||||
1130 | # takes $fh,$string | ||||||
1131 | # | ||||||
1132 | # returns undef | ||||||
1133 | # | ||||||
1134 | ###################################################### | ||||||
1135 | sub writelog | ||||||
1136 | { | ||||||
1137 | 12 | 12 | 0 | 14 | my $fh = shift; | ||
1138 | 12 | 13 | my $string = shift; | ||||
1139 | 12 | 50 | 23 | if ($fh eq "STDOUT") | |||
1140 | { | ||||||
1141 | 0 | 0 | print $string."\n"; | ||||
1142 | } | ||||||
1143 | else | ||||||
1144 | { | ||||||
1145 | 12 | 36 | print $fh $string."\n"; | ||||
1146 | } | ||||||
1147 | |||||||
1148 | 12 | 19 | return; | ||||
1149 | } | ||||||
1150 | |||||||
1151 | ######################################################## | ||||||
1152 | # log - log an entry | ||||||
1153 | # takes message | ||||||
1154 | # returns nothing | ||||||
1155 | ######################################################## | ||||||
1156 | sub log | ||||||
1157 | { | ||||||
1158 | 0 | 0 | 0 | 0 | my $self = shift; | ||
1159 | 0 | 0 | my $message = shift; | ||||
1160 | |||||||
1161 | 0 | 0 | $self->{log}->($self->{loghandle},$message); | ||||
1162 | } | ||||||
1163 | |||||||
1164 | |||||||
1165 | |||||||
1166 | ######################################################################## | ||||||
1167 | # save - saves config to a DDS file | ||||||
1168 | # | ||||||
1169 | # takes filename | ||||||
1170 | # returns success or fail | ||||||
1171 | # | ||||||
1172 | ######################################################################## | ||||||
1173 | sub save | ||||||
1174 | { | ||||||
1175 | 0 | 0 | 1 | 0 | my $self = shift; | ||
1176 | 0 | 0 | my $filename = shift; | ||||
1177 | |||||||
1178 | 0 | 0 | require Data::Dump::Streamer; | ||||
1179 | |||||||
1180 | 0 | 0 | 0 | if (!$filename) | |||
1181 | { | ||||||
1182 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"ERROR: No filename supplied to save"); | ||||
1183 | 0 | 0 | $self->{'lasterror'}="ERROR: No filename supplied to save"; | ||||
1184 | } | ||||||
1185 | |||||||
1186 | 0 | 0 | my $fh; | ||||
1187 | open ($fh,">",$filename) or do | ||||||
1188 | 0 | 0 | 0 | { | |||
1189 | 0 | 0 | $self->{'lasterror'} = "Unable to open $filename for writing because $!"; | ||||
1190 | 0 | 0 | return; | ||||
1191 | }; | ||||||
1192 | |||||||
1193 | |||||||
1194 | 0 | 0 | my $dds = Data::Dump::Streamer->new; | ||||
1195 | |||||||
1196 | # clone the object and remove non serialisable or unwanted keys | ||||||
1197 | 0 | 0 | my $clone = \%$self; | ||||
1198 | 0 | 0 | delete $clone->{'writehandle'}; | ||||
1199 | 0 | 0 | delete $clone->{'readhandle'}; | ||||
1200 | 0 | 0 | delete $clone->{'loghandle'}; | ||||
1201 | 0 | 0 | delete $clone->{'log'}; | ||||
1202 | |||||||
1203 | 0 | 0 | print $fh $dds->Dump($clone)->Out(); | ||||
1204 | |||||||
1205 | 0 | 0 | close $fh; | ||||
1206 | |||||||
1207 | 0 | 0 | return 1; | ||||
1208 | } | ||||||
1209 | |||||||
1210 | ####################################################################### | ||||||
1211 | # load - read back a config file | ||||||
1212 | # | ||||||
1213 | # takes filename | ||||||
1214 | # returns 1 on success, 0 on fail | ||||||
1215 | # | ||||||
1216 | ####################################################################### | ||||||
1217 | sub load | ||||||
1218 | { | ||||||
1219 | 0 | 0 | 1 | 0 | my $self = shift; | ||
1220 | 0 | 0 | my $filename = shift; | ||||
1221 | |||||||
1222 | 0 | 0 | require Data::Dump::Streamer; | ||||
1223 | |||||||
1224 | 0 | 0 | 0 | if (!$filename) | |||
1225 | { | ||||||
1226 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"ERROR: No filename supplied to load"); | ||||
1227 | 0 | 0 | $self->{'lasterror'}="ERROR: No filename supplied to load"; | ||||
1228 | } | ||||||
1229 | |||||||
1230 | 0 | 0 | my $Data_Sync1; # this is what Data::Dump::Streamer calls the object | ||||
1231 | my $fh; | ||||||
1232 | open ($fh,"<",$filename) or do | ||||||
1233 | 0 | 0 | 0 | { | |||
1234 | 0 | 0 | $self->{'lasterror'} = "Unable to open $filename for reading because $!"; | ||||
1235 | 0 | 0 | return; | ||||
1236 | }; | ||||||
1237 | 0 | 0 | my $evalstring; | ||||
1238 | 0 | 0 | while (<$fh>) | ||||
1239 | { | ||||||
1240 | 0 | 0 | $evalstring .=$_; | ||||
1241 | } | ||||||
1242 | |||||||
1243 | 0 | 0 | eval $evalstring; | ||||
1244 | 0 | 0 | my $successfulload; | ||||
1245 | 0 | 0 | for my $attrib (keys %$Data_Sync1) | ||||
1246 | { | ||||||
1247 | 0 | 0 | $self->{$attrib} = $Data_Sync1->{$attrib}; | ||||
1248 | 0 | 0 | $successfulload++; | ||||
1249 | } | ||||||
1250 | |||||||
1251 | 0 | 0 | 0 | if (!$successfulload) | |||
1252 | { | ||||||
1253 | 0 | 0 | 0 | if ($self->{'log'}) | |||
1254 | { | ||||||
1255 | 0 | 0 | $self->{'log'}->($self->{'loghandle'},"ERROR: Unsuccessful load from $filename") ; | ||||
1256 | } | ||||||
1257 | 0 | 0 | $self->{'lasterror'}="Unsuccessful load from $filename"; | ||||
1258 | 0 | 0 | return; | ||||
1259 | } | ||||||
1260 | |||||||
1261 | 0 | 0 | return 1; | ||||
1262 | |||||||
1263 | } | ||||||
1264 | |||||||
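A hedged round trip of save and load (the filename is arbitrary); since handles and log settings are stripped before saving, live handles have to be re-attached after loading:

    $sync->save("syncjob.dds") or die $sync->error();

    # later, or in another script:
    my $restored = Data::Sync->new(configfile => "syncjob.dds", log => \*STDERR);
    $restored->source($ldap);    # criteria come from the file, only the handle is re-supplied
    $restored->target($dbh);
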
1265 | ######################################################################## | ||||||
1266 | # error - returns last error | ||||||
1267 | # | ||||||
1268 | # takes no parameter | ||||||
1269 | # returns error | ||||||
1270 | # | ||||||
1271 | ######################################################################## | ||||||
1272 | sub error | ||||||
1273 | { | ||||||
1274 | 0 | 0 | 1 | 0 | my $self = shift; | ||
1275 | |||||||
1276 | 0 | 0 | return $self->{'lasterror'}; | ||||
1277 | } | ||||||
1278 | |||||||
1279 | ######################################################################## | ||||||
1280 | # lastruntime - returns last run time | ||||||
1281 | # | ||||||
1282 | # no parameters, returns datetime as YYYYMMDDHHMMSS | ||||||
1283 | # | ||||||
1284 | ######################################################################## | ||||||
1285 | sub lastruntime | ||||||
1286 | { | ||||||
1287 | 0 | 0 | 1 | 0 | my $self = shift; | ||
1288 | 0 | 0 | return $self->{'lastruntime'}; | ||||
1289 | } | ||||||
1290 | |||||||
1291 | |||||||
1292 | |||||||
1293 | ####################################################################### | ||||||
1294 | # mvseparator - convenience function to change the multivalue | ||||||
1295 | # separator | ||||||
1296 | # | ||||||
1297 | # takes scalar or null | ||||||
1298 | # returns true or separator | ||||||
1299 | # | ||||||
1300 | ####################################################################### | ||||||
1301 | sub mvseparator | ||||||
1302 | { | ||||||
1303 | 2 | 2 | 1 | 1787 | my $self = shift; | ||
1304 | 2 | 15 | my $separator = shift; | ||||
1305 | 2 | 100 | 6 | if (!$separator){return $self->{mvseparator}} | |||
1 | 4 | ||||||
1306 | else | ||||||
1307 | { | ||||||
1308 | 1 | 4 | $self->{mvseparator} = $separator; | ||||
1309 | 1 | 3 | return 1; | ||||
1310 | } | ||||||
1311 | } | ||||||
1312 | |||||||
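mvseparator acts as both getter and setter, for example:

    print $sync->mvseparator(), "\n";    # default is "|"
    $sync->mvseparator(";");             # use ";" when concatenating multivalued attributes
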
1313 | ######################################################################## | ||||||
1314 | # commit - commit the write handle | ||||||
1315 | # | ||||||
1316 | # takes null | ||||||
1317 | # returns result | ||||||
1318 | # | ||||||
1319 | ######################################################################## | ||||||
1320 | sub commit | ||||||
1321 | { | ||||||
1322 | 0 | 0 | 1 | 0 | my $self = shift; | ||
1323 | 0 | 0 | my $result; | ||||
1324 | |||||||
1325 | 0 | 0 | $result = $self->{writehandle}->commit(); | ||||
1326 | |||||||
1327 | 0 | 0 | return $result; | ||||
1328 | } | ||||||
1329 | |||||||
1330 | ######################################################################## | ||||||
1331 | # validation - checks that the data in the dataset is what you expect | ||||||
1332 | # | ||||||
1333 | # takes hash of param=>regex | ||||||
1334 | # | ||||||
1335 | # returns t/f | ||||||
1336 | # | ||||||
1337 | ######################################################################## | ||||||
1338 | sub validation | ||||||
1339 | { | ||||||
1340 | 1 | 1 | 1 | 7 | my $self = shift; | ||
1341 | 1 | 3 | my %params = @_; | ||||
1342 | |||||||
1343 | 1 | 4 | for my $attrib (keys %params) | ||||
1344 | { | ||||||
1345 | 2 | 416 | $self->{validation}->{$attrib} = eval "sub { my \$self =shift; | ||||
1346 | my \$string = shift; | ||||||
1347 | return \$string =~ $params{$attrib};}"; | ||||||
1348 | } | ||||||
1349 | |||||||
1350 | 1 | 4 | return 1; | ||||
1351 | } | ||||||
1352 | |||||||
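A validation sketch; each value is a pattern match supplied as a string (delimiters included), which the loop above wraps in an eval'd sub. The attribute names and patterns are illustrative:

    $sync->validation(
        mail            => '/\@/',           # must contain an @
        telephonenumber => '/^[0-9 +]+$/',   # digits, spaces and + only
    );
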
1353 | ######################################################################### | ||||||
1354 | # validate - run the validation | ||||||
1355 | # | ||||||
1356 | # takes AoH | ||||||
1357 | # returns t/f & logs | ||||||
1358 | # | ||||||
1359 | ######################################################################### | ||||||
1360 | sub validate | ||||||
1361 | { | ||||||
1362 | 2 | 2 | 0 | 316 | my $self = shift; | ||
1363 | 2 | 3 | my $AoH = shift; | ||||
1364 | 2 | 3 | my $errorflag = 0; | ||||
1365 | |||||||
1366 | 2 | 5 | for my $record (@$AoH) | ||||
1367 | { | ||||||
1368 | |||||||
1369 | 13 | 11 | my $errorcounter = 0; | ||||
1370 | 13 | 26 | for my $attrib (keys %$record) | ||||
1371 | { | ||||||
1372 | # only try to validate the field if a validation pattern match is defined | ||||||
1373 | 30 | 100 | 67 | if (!$self->{validation}->{$attrib}){next} | |||
14 | 46 | ||||||
1374 | |||||||
1375 | # recursively validate, in case of multi valued attributes | ||||||
1376 | 16 | 177 | my $result = $self->recursivevalidate($$record{$attrib},$self->{validation}->{$attrib}); | ||||
1377 | 16 | 27 | $errorcounter +=(!$result); | ||||
1378 | } | ||||||
1379 | 13 | 71 | $self->{log}->($self->{loghandle},"Record ". join ",",values %$record," validated with $errorcounter errors"); | ||||
1380 | 13 | 100 | 36 | if ($errorcounter){$errorflag+= $errorcounter } | |||
1 | 2 | ||||||
1381 | } | ||||||
1382 | |||||||
1383 | 2 | 8 | $self->{log}->($self->{loghandle},"Validation completed with $errorflag errors"); | ||||
1384 | 2 | 100 | 5 | if ($errorflag > 0) {return} | |||
1 | 3 | ||||||
1 | 3 | ||||||
1385 | else {return 1} | ||||||
1386 | |||||||
1387 | } | ||||||
1388 | |||||||
1389 | ######################################################################## | ||||||
1390 | # recursivevalidate - run the validation coderefs against a complex | ||||||
1391 | # object | ||||||
1392 | ######################################################################## | ||||||
1393 | sub recursivevalidate | ||||||
1394 | { | ||||||
1395 | 28 | 28 | 0 | 414 | my $self = shift; | ||
1396 | 28 | 32 | my $value = shift; | ||||
1397 | 28 | 159 | my $matchref = shift; | ||||
1398 | |||||||
1399 | 28 | 100 | 185 | if (!ref($value)) | |||
50 | |||||||
0 | |||||||
1400 | { | ||||||
1401 | # take a copy of the value and regex that, just to make sure | ||||||
1402 | # we don't accidentally do a s/// | ||||||
1403 | 22 | 24 | my $tempvalue = $value; | ||||
1404 | 22 | 1375 | my $result = $matchref->($self,$tempvalue); | ||||
1405 | 22 | 48 | return $result; | ||||
1406 | } | ||||||
1407 | |||||||
1408 | elsif ($value =~ /ARRAY/) | ||||||
1409 | { | ||||||
1410 | 6 | 6 | my $errorcounter; | ||||
1411 | 6 | 17 | for (@$value) | ||||
1412 | { | ||||||
1413 | 12 | 970 | my $result = $self->recursivevalidate($_,$matchref); | ||||
1414 | 12 | 22 | $errorcounter +=(!$result); | ||||
1415 | } | ||||||
1416 | 6 | 50 | 13 | if ($errorcounter){return} | |||
0 | 0 | ||||||
1417 | 6 | 10 | return 1; | ||||
1418 | } | ||||||
1419 | |||||||
1420 | elsif ($value =~/HASH/) | ||||||
1421 | { | ||||||
1422 | |||||||
1423 | 0 | 0 | my $errorcounter; | ||||
1424 | 0 | 0 | for (values %$value) | ||||
1425 | { | ||||||
1426 | 0 | 0 | my $result = $self->recursivevalidate($_,$matchref); | ||||
1427 | 0 | 0 | $errorcounter +=(!$result); | ||||
1428 | } | ||||||
1429 | 0 | 0 | 0 | if ($errorcounter){return} | |||
0 | 0 | ||||||
1430 | 0 | 0 | return 1; | ||||
1431 | } | ||||||
1432 | |||||||
1433 | } | ||||||
1434 | |||||||
1435 | |||||||
1436 | ###################################################################################### | ||||||
1437 | # get - utility function to read a set of records from the source and return them as an AoH | ||||||
1438 | ###################################################################################### | ||||||
1439 | sub get | ||||||
1440 | { | ||||||
1441 | 0 | 0 | 0 | 0 | my $self = shift; | ||
1442 | |||||||
1443 | 0 | 0 | my $result = $self->{read}->($self); | ||||
1444 | |||||||
1445 | 0 | 0 | 0 | if (!$result) { return undef } | |||
0 | 0 | ||||||
1446 | |||||||
1447 | 0 | 0 | my $recordset = $self->sourceToAoH($result); | ||||
1448 | |||||||
1449 | 0 | 0 | return $recordset; | ||||
1450 | |||||||
1451 | } | ||||||
1452 | |||||||
1453 | ####################################################################################### | ||||||
1454 | # put - utility function to write a set of records to the target | ||||||
1455 | ####################################################################################### | ||||||
1456 | sub put | ||||||
1457 | { | ||||||
1458 | 0 | 0 | 0 | 0 | my $self = shift; | ||
1459 | 0 | 0 | my $recordset = shift; | ||||
1460 | 0 | 0 | my $result = $self->{write}->($self,$recordset); | ||||
1461 | 0 | 0 | return $result; | ||||
1462 | } | ||||||
1463 | |||||||
1464 | |||||||
1465 | |||||||
1466 | |||||||
1467 | |||||||
1468 | ######################################################################## | ||||||
1469 | ######################################################################## | ||||||
1470 | # transformation functions | ||||||
1471 | ######################################################################## | ||||||
1472 | ######################################################################## | ||||||
1473 | |||||||
1474 | sub stripspaces | ||||||
1475 | { | ||||||
1476 | 11 | 11 | 0 | 13 | my $var = shift; | ||
1477 | 11 | 39 | $var=~s/ //g; | ||||
1478 | 11 | 21 | return $var; | ||||
1479 | } | ||||||
1480 | |||||||
1481 | sub stripnewlines | ||||||
1482 | { | ||||||
1483 | 2 | 2 | 0 | 3 | my $var = shift; | ||
1484 | 2 | 9 | $var=~s/\n/ /g; | ||||
1485 | # (just in case) | ||||||
1486 | 2 | 5 | $var=~s/\r//g; | ||||
1487 | 2 | 5 | return $var; | ||||
1488 | } | ||||||
1489 | |||||||
1490 | sub uppercase | ||||||
1491 | { | ||||||
1492 | 2 | 2 | 0 | 52 | my $var = shift; | ||
1493 | 2 | 7 | return uc($var); | ||||
1494 | } | ||||||
1495 | |||||||
1496 | sub lowercase | ||||||
1497 | { | ||||||
1498 | 2 | 2 | 0 | 5 | my $var =shift; | ||
1499 | 2 | 6 | return lc($var); | ||||
1500 | } | ||||||
1501 | |||||||
1502 | |||||||
1503 | |||||||
1504 | 1; | ||||||
1505 | |||||||
1506 | ######################################################################### | ||||||
1507 | ######################################################################### | ||||||
1508 | # Nothing but POD from here on out | ||||||
1509 | ######################################################################### | ||||||
1510 | ######################################################################### | ||||||
1511 | |||||||
1512 | =pod | ||||||
1513 | |||||||
1514 | =head1 NAME | ||||||
1515 | |||||||
1516 | Data::Sync - A simple metadirectory/datapump module | ||||||
1517 | |||||||
1518 | =head1 SYNOPSIS | ||||||
1519 | |||||||
1520 | use Data::Sync; | ||||||
1521 | |||||||
1522 | my $sync = Data::Sync->new(log=>"STDOUT",[configfile=>"config.dds"],[jobname=>"MyJob"]); | ||||||
1523 | |||||||
1524 | $sync->source($dbhandle,{ | ||||||
1525 | select=>"select * from testtable", | ||||||
1526 | index=>"NAME", | ||||||
1527 | }); | ||||||
1528 | |||||||
1529 | or | ||||||
1530 | |||||||
1531 | $sync->source($ldaphandle,{filter=>"(cn=*)", | ||||||
1532 | scope=>"sub", | ||||||
1533 | base=>"ou=testcontainer,dc=test,dc=org"}); | ||||||
1534 | |||||||
1535 | $sync->target($dbhandle,{table=>'targettable', | ||||||
1536 | index=>'NAME'}); | ||||||
1537 | |||||||
1538 | or | ||||||
1539 | |||||||
1540 | $sync->target($ldaphandle); | ||||||
1541 | |||||||
1542 | $sync->mappings(FIRSTNAME=>'givenName',SURNAME=>'sn'); | ||||||
1543 | |||||||
1544 | $sync->buildattributes(dn=>"cn=%NAME%,ou=testcontainer,dc=test,dc=org", | ||||||
1545 | objectclass=>"organizationalUnit"); | ||||||
1546 | |||||||
1547 | $sync->transforms( PHONE=>'s/0(\d{4})/\+44 \($1\) /', | ||||||
1548 | ADDRESS=>sub{my $address=shift; | ||||||
1549 | $address=~s/\n/\ /g; | ||||||
1550 | return $address}); | ||||||
1551 | |||||||
1552 | $sync->validation( address=>"/street/i", | ||||||
1553 | name=>"/(Dr|Mr|Mrs|Ms|Miss)/" ); | ||||||
1554 | |||||||
1555 | $sync->save("filename"); | ||||||
1556 | |||||||
1557 | $sync->load("filename"); | ||||||
1558 | |||||||
1559 | $sync->run(); | ||||||
1560 | |||||||
1561 | print $sync->error(); | ||||||
1562 | |||||||
1563 | print $sync->lastruntime(); | ||||||
1564 | |||||||
1565 | print $sync->commit(); | ||||||
1566 | |||||||
1567 | (For more complex uses, see Data::Sync::Advanced) | ||||||
1568 | |||||||
1569 | =head1 DESCRIPTION | ||||||
1570 | |||||||
1571 | NB: this is an interim release - the next version will contain significant changes, including changes to the UI. | ||||||
1572 | |||||||
1573 | Data::Sync is a simple metadirectory/data pump module. It automates a number of the common tasks required when writing code to migrate/sync information from one datasource to another. | ||||||
1574 | |||||||
1575 | In order to use Data::Sync, you must define a source and a target. The first parameter to the source & target methods is a bound DBI/Net::LDAP handle. | ||||||
1576 | |||||||
1577 | Having defined your datasources, define how attributes map between them with mappings. If an attribute returned from the data source has no entry in the mapping table, it will be assumed that the name is the same in both databases. | ||||||
1578 | |||||||
1579 | Attributes can be built up from multiple other attributes using buildattributes. This uses a simple template, %FIELDNAME% which is replaced at run time with the value of the field from the current record. More complex modifications to data can be made with transforms, which runs after the built attributes are created. | ||||||
1580 | |||||||
1581 | Transforms can be made with the method transforms, which takes a hash of FIELDNAME=>transformation. This transformation can be one of three things: a regular expression in string form (see synopsis), the name of a predefined transformation supplied in Data::Sync, or a code ref. | ||||||
1582 | |||||||
1583 | Finally, if you are confident your data is all in the right format, use run. That will read the data from the source, modify it as you have specified, validate it against the pattern matches you've specified (if any) and write it to the target. | ||||||
1584 | |||||||
1585 | B | ||||||
1586 | |||||||
1587 | =head1 CONSTRUCTOR | ||||||
1588 | |||||||
1589 | my $sync = Data::Sync->new(log=>"STDOUT"); | ||||||
1590 | my $sync = Data::Sync->new(log=>$fh); | ||||||
1591 | my $sync = Data::Sync->new(configfile=>"config.dds"); | ||||||
1592 | my $sync = Data::Sync->new(jobname=>"MyJob"); | ||||||
1593 | |||||||
1594 | The constructor returns a Data::Sync object. Optionally, to use logging, pass the string "STDOUT" as the log parameter to print logging to STDOUT, or pass a lexical filehandle. You can specify a config file to get the configuration from, in which case you don't need to call mappings/transforms etc, although you'll still need to pass the db/ldap handles (only) to source & target. | ||||||
1595 | |||||||
1596 | If you are using attribute hashing to minimise unnecessary writes, you should specify a jobname, as this is the name given to the SQLite hash database. | ||||||
1597 | |||||||
1598 | =head1 METHODS | ||||||
1599 | |||||||
1600 | =head2 source | ||||||
1601 | |||||||
1602 | $sync->source($dbhandle,{select=>"select * from testtable"}); | ||||||
1603 | |||||||
1604 | or | ||||||
1605 | |||||||
1606 | $sync->source($ldaphandle,{filter=>"(cn=*)", | ||||||
1607 | scope=>"sub", | ||||||
1608 | base=>"ou=testcontainer,dc=test,dc=org"}); | ||||||
1609 | |||||||
1610 | or | ||||||
1611 | |||||||
1612 | $sync->source($dbhandle); # only if loading config file | ||||||
1613 | |||||||
1614 | Requires a valid, bound (i.e. logged in) Net::LDAP or DBI handle, and a hash of parameters for the data source (assuming you aren't loading the config from a file). LDAP parameters are: | ||||||
1615 | filter | ||||||
1616 | scope | ||||||
1617 | base | ||||||
1618 | attrs | ||||||
1619 | controls | ||||||
1620 | |||||||
1621 | (See Net::LDAP for more details of these parameters). | ||||||
1622 | |||||||
1623 | DBI parameters are: | ||||||
1624 | select | ||||||
1625 | |||||||
1626 | |||||||
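For example, a minimal sketch of preparing the two kinds of handle before passing them to source (the DSN, hostname, credentials and search base below are placeholders, not values required by Data::Sync):

    use DBI;
    use Net::LDAP;

    # DBI source: the handle must already be connected
    my $dbhandle = DBI->connect("dbi:SQLite:dbname=source.db","","")
        or die $DBI::errstr;
    $sync->source($dbhandle,{select=>"select * from testtable",
                             index=>"NAME"});

    # ...or an LDAP source instead: the handle must already be bound
    my $ldaphandle = Net::LDAP->new("ldap.example.org") or die $@;
    $ldaphandle->bind("cn=admin,dc=test,dc=org",password=>"secret");
    $sync->source($ldaphandle,{filter=>"(cn=*)",
                               scope=>"sub",
                               base=>"ou=testcontainer,dc=test,dc=org"});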
1627 | =head2 target | ||||||
1628 | |||||||
1629 | $sync->target($dbhandle,{table=>'targettable', | ||||||
1630 | index=>'NAME'}); | ||||||
1631 | |||||||
1632 | or | ||||||
1633 | |||||||
1634 | $sync->target($ldaphandle); | ||||||
1635 | |||||||
1636 | or | ||||||
1637 | |||||||
1638 | $sync->target($db); # only if loading config from a file | ||||||
1639 | |||||||
1640 | or | ||||||
1641 | |||||||
1642 | $sync->target($dbhandle,{table=>'targettable', | ||||||
1643 | index=>'NAME', | ||||||
1644 | hashattributes=>["ADDRESS","PHONE"] | ||||||
1645 | }); | ||||||
1646 | |||||||
1647 | or | ||||||
1648 | |||||||
1649 | $sync->target($ldaphandle,{hashattributes=>["ADDRESS","PHONE"]} | ||||||
1650 | ); | ||||||
1651 | |||||||
1652 | |||||||
1653 | Requires a valid, bound (i.e. logged in) DBI or Net::LDAP handle, and a hash of parameters (unless you are loading the config from a file). No parameters are required for LDAP data targets, but a dn must have been either read from the data source or constructed using buildattributes. Valid DBI parameters are | ||||||
1654 | |||||||
1655 | table - the table you wish to write to on the data target | ||||||
1656 | index - the attribute you wish to use as an index | ||||||
1657 | |||||||
1658 | There is no 'pre check' on datatypes or lengths, so if you attempt to write a record with an oversized or mismatched data type, it will fail with an error. | ||||||
1659 | |||||||
1660 | Note: if you are writing from DB to LDAP, you must construct all mandatory attributes using buildattributes, or additions will fail. | ||||||
1661 | |||||||
1662 | Attribute hashing can be specified with the keys: | ||||||
1663 | |||||||
1664 | index=>"index/key attribute" | ||||||
1665 | hashattributes=>["attrib","attrib","attrib"] | ||||||
1666 | |||||||
1667 | When running, this will create an MD5 hash of the concatenation of the specified attributes, and store it in a database under the specified index. Next time the job is run, it will hash the value again, and compare it with the last hashed value. If they are the same, the record will not be written to the target. These entries are stored in a SQLite database - if you want to manipulate the database directly, you can do so with a sqlite3 client. The SQLite database takes its name from the 'jobname' attribute specified in $sync->new. If you didn't specify a jobname, it will default to 'noname' - so if you are running multiple jobs with attribute hashing in the same directory on your disk, it's important to make sure they have names. | ||||||
1668 | |||||||
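Conceptually, the change detection works like the following sketch (illustrative only - the module stores its hashes in the SQLite database described above, and the field names here are examples):

    use Digest::MD5 qw(md5_hex);

    # hash the concatenation of the attributes listed in hashattributes
    my %record  = (NAME=>"jsmith", ADDRESS=>"1 High Street", PHONE=>"01234 567890");
    my $current = md5_hex(join("",@record{qw(ADDRESS PHONE)}));

    # $previous would be the hash stored for this NAME on the last run
    my $previous = "";
    if ($current eq $previous)
    {
        # unchanged since the last run - skip the write for this record
    }
    else
    {
        # changed or new - write the record and store $current under NAME
    }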
1669 | |||||||
1670 | =head2 mappings | ||||||
1671 | |||||||
1672 | $sync->mappings(FIRSTNAME=>'givenName',SURNAME=>'sn'); | ||||||
1673 | |||||||
1674 | Maps individual field names from the data source to their corresponding field names in the data target. | ||||||
1675 | |||||||
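In effect this renames keys in each record before it is written; a rough equivalent on a single record (not the module's internal code):

    my %mappings = (FIRSTNAME=>'givenName', SURNAME=>'sn');
    my %record   = (FIRSTNAME=>"Joe", SURNAME=>"Smith", TITLE=>"Dr");

    # rename mapped fields; unmapped fields (TITLE) keep their names
    for my $from (keys %mappings)
    {
        next unless exists $record{$from};
        $record{$mappings{$from}} = delete $record{$from};
    }

    # %record is now (givenName=>"Joe", sn=>"Smith", TITLE=>"Dr")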
1676 | =head2 buildattributes | ||||||
1677 | |||||||
1678 | $sync->buildattributes(dn=>"cn=%NAME%,ou=testcontainer,dc=test,dc=org", | ||||||
1679 | objectclass=>"organizationalUnit"); | ||||||
1680 | |||||||
1681 | Builds new target attributes up from existing source attributes. A simple template form is used - the template should be a string variable, containing the source field name between % delimiters. If no % delimiters are found, the string will be written as a literal. | ||||||
1682 | |||||||
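The substitution described above behaves roughly like this sketch (illustrative, not the module's internal code):

    my %record   = (NAME=>"jsmith");
    my $template = "cn=%NAME%,ou=testcontainer,dc=test,dc=org";

    # replace each %FIELDNAME% token with the value from the current record;
    # a template with no % delimiters is written through as a literal
    (my $dn = $template) =~ s/%(\w+)%/$record{$1}/g;

    # $dn is now "cn=jsmith,ou=testcontainer,dc=test,dc=org"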
1683 | =head2 transforms | ||||||
1684 | |||||||
1685 | $sync->transforms( PHONE=>'s/0(\\d{4})/\+44 \(\$1\)/', | ||||||
1686 | OFFICE=>"stripspaces", | ||||||
1687 | ADDRESS=>sub{my $address=shift; | ||||||
1688 | $address=~s/\n/\ /g; | ||||||
1689 | return $address}); | ||||||
1690 | |||||||
1691 | Converts each field in the source data using the parameters passed. Each parameter pair is the I<fieldname> and the transformation to apply: a regular expression in string form, a code ref, or the name of one of the predefined transformations supplied in Data::Sync. The predefined transformations are: | ||||||
1692 | |||||||
1693 | stripspaces | ||||||
1694 | stripnewlines | ||||||
1695 | uppercase | ||||||
1696 | lowercase | ||||||
1697 | concatenate | ||||||
1698 | |||||||
1699 | concatenate joins together the values of a multi valued attribute with the content of $sync->{mvseparator} - this defaults to | but can be changed with: | ||||||
1700 | |||||||
1701 | $sync->mvseparator("<separator>"); | ||||||
1702 | |||||||
1703 | Transformations are recursive, so if you are importing some form of hierarchical data, the transformation will walk the tree until it finds a scalar (or a list, in the case of concatenate) that it can perform the transformation on. | ||||||
1704 | |||||||
1705 | Note: If passing a regex in a string, make sure you use single quotes. Double quotes will invite perl to interpolate the contents, with unexpected results. | ||||||
1706 | |||||||
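For example, mixing the three forms in one call (field names are illustrative):

    $sync->transforms( PHONE   => 's/^0/+44 /',     # regex in a string - note the single quotes
                       OFFICE  => "stripspaces",    # predefined transformation
                       ADDRESS => sub {             # code ref
                                       my $address = shift;
                                       $address =~ s/\n/ /g;
                                       return $address;
                                      } );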
1707 | =head2 validation | ||||||
1708 | |||||||
1709 | $sync->validation( address=>"/street/i", | ||||||
1710 | name=>"/(Dr|Mr|Mrs|Ms|Miss)/" ); | ||||||
1711 | |||||||
1712 | Validation defines pattern matches for attributes. Validation is the last step to run before writing: if any of the specified fields fails to match its pattern, the whole validation fails and the write does not happen. Validation is optional - you don't have to specify a validation set - but it's useful for ensuring that the constructed record set is what you were expecting before you write it out. Validation is also recursive, so it will handle multi valued attributes and subtrees in LDAP. | ||||||
1713 | |||||||
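For example, validating before the write as part of a normal run (the patterns are illustrative):

    $sync->validation( PHONE   => '/^\+44 /',
                       ADDRESS => '/street|road|lane/i' );

    # run() applies the validation after mapping, building and transforming;
    # if any field fails its pattern the write does not happen
    $sync->run() or die "Run failed: ".$sync->error."\n";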
1714 | =head2 deletes | ||||||
1715 | |||||||
1716 | $sync->deletes("delete"); | ||||||
1717 | |||||||
1718 | or | ||||||
1719 | |||||||
1720 | $sync->deletes(attrib=>value,attrib=>value); | ||||||
1721 | |||||||
1722 | Defines the action to be taken against entries that have disappeared from the data source since the last run. Note that this only works if hashing is defined in 'target'. The parameter can be "delete", in which case the entry is deleted from the target, or attrib=>value pairs, in which case the specified attributes are changed to the string in 'value'. Note that it is not possible to 'transform' the attribute value, as the data is no longer present once a delete has been detected. | ||||||
1723 | |||||||
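Since delete detection relies on the stored hashes, a deletes setting only makes sense with a hashed target, e.g. (a sketch - table and field names are illustrative):

    $sync->target($dbhandle,{table=>'targettable',
                             index=>'NAME',
                             hashattributes=>["ADDRESS","PHONE"]});

    # remove target records whose source entries have disappeared...
    $sync->deletes("delete");

    # ...or flag them instead of removing them
    $sync->deletes(STATUS=>"gone");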
1724 | |||||||
1725 | |||||||
1726 | =head2 save | ||||||
1727 | |||||||
1728 | $sync->save("filename"); | ||||||
1729 | |||||||
1730 | Saves the config to a Data::Dump::Streamer file. Returns 1 on success. | ||||||
1731 | |||||||
1732 | =head2 load | ||||||
1733 | |||||||
1734 | $sync->load("filename"); | ||||||
1735 | |||||||
1736 | Loads the config from a Data::Dump::Streamer file previously created with save. You still need to define the source and target db/ldap handles with source & target, but if you've loaded the config from a file you can omit the hash of options. | ||||||
1737 | |||||||
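A typical pattern is to build and save the job definition once, then reload it from a scheduled script (the filename is arbitrary):

    # one-off setup script
    my $sync = Data::Sync->new(log=>"STDOUT",jobname=>"MyJob");
    $sync->source($dbhandle,{select=>"select * from testtable",index=>"NAME"});
    $sync->target($ldaphandle);
    $sync->mappings(FIRSTNAME=>'givenName',SURNAME=>'sn');
    $sync->save("myjob.dds");

    # scheduled script: reload the config, reattach live handles, run
    my $job = Data::Sync->new(configfile=>"myjob.dds",log=>"STDOUT");
    $job->source($dbhandle);
    $job->target($ldaphandle);
    $job->run() or die $job->error."\n";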
1738 | =head2 run | ||||||
1739 | |||||||
1740 | $sync->run() or die $sync->error."\n"; | ||||||
1741 | |||||||
1742 | No parameters. Reads the data from the source, converts and renames it as defined in mappings, buildattributes and transforms, and writes it to the target. | ||||||
1743 | |||||||
1744 | =head2 error | ||||||
1745 | |||||||
1746 | print $sync->error; | ||||||
1747 | |||||||
1748 | Returns the last error encountered by the module. This is set when, for example, a file fails to load correctly or a SQL error is encountered. When this occurs, the return value from the called function will be zero, and error() should be called to identify the problem. | ||||||
1749 | |||||||
1750 | =head2 lastruntime | ||||||
1751 | |||||||
1752 | print $sync->lastruntime; | ||||||
1753 | |||||||
1754 | Returns the last time the job was run as YYYYMMDDHHMMSS. This is saved in the config file. | ||||||
1755 | |||||||
1756 | =head2 mvseparator | ||||||
1757 | |||||||
1758 | $sync->mvseparator("<separator>"); | ||||||
1759 | |||||||
1760 | print $sync->mvseparator(); | ||||||
1761 | |||||||
1762 | Sets or returns the multi valued attribute separator. (defaults to |) | ||||||
1763 | |||||||
1764 | =head2 commit | ||||||
1765 | |||||||
1766 | $sync->commit(); | ||||||
1767 | |||||||
1768 | Calls the write handle's commit method, where the write handle is a DBI handle (there's no rollback/commit available in LDAP). This is provided as a convenience, in case you have AutoCommit turned off on your db handle. | ||||||
1769 | |||||||
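For example, with AutoCommit switched off on the target handle (the DSN is a placeholder):

    use DBI;

    my $dbhandle = DBI->connect("dbi:SQLite:dbname=target.db","","",
                                {AutoCommit=>0,RaiseError=>1})
        or die $DBI::errstr;

    $sync->target($dbhandle,{table=>'targettable',index=>'NAME'});

    # write the records, then make the changes permanent
    $sync->run() or die $sync->error."\n";
    $sync->commit();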
1770 | =head1 SEE ALSO | ||||||
1771 | |||||||
1772 | Extra methods, access to the internal methods of Data::Sync from your own code, and subclassing are detailed in Data::Sync::Advanced. | ||||||
1773 | |||||||
1774 | =head1 PREREQS | ||||||
1775 | |||||||
1776 | If you want to save and load definition files, you'll need Data::Dump::Streamer | ||||||
1777 | |||||||
1778 | If you are using DBI datasources, you will need DBI & the appropriate DBI drivers. | ||||||
1779 | |||||||
1780 | If you are using LDAP datasources, you will need Net::LDAP. | ||||||
1781 | |||||||
1782 | If you are using attribute hashing, you will also need DBI & DBD::SQLite | ||||||
1783 | |||||||
1784 | =head1 VERSION | ||||||
1785 | |||||||
1786 | 0.08 | ||||||
1787 | |||||||
1788 | =head1 BUGS & CAVEATS | ||||||
1789 | |||||||
1790 | Data::Sync handles column/attribute names as case sensitive. PostgreSQL (at least at the time of writing) appears to return column names in lower case, whether they were created in lower case or not. I make no guarantees about this, but using lower case column names in all Data::Sync code seems to work OK. Please ensure that any code you write using this module with PostgreSQL is particularly thoroughly tested. | ||||||
1791 | |||||||
1792 | =head1 TODO | ||||||
1793 | |||||||
1794 | Hashing individual attributes so if one attribute changes, only that attribute is written. | ||||||
1795 | |||||||
1796 | UI changes: instead of passing a handle, pass a type=>LDAP/DBI/Psql etc, and the connect parameters. Restructure in subclasses so that DBI is a generic sql, but can be subclassed by specific classes for particular datasources. This is a major change to the UI. write a handle factory to create the db/ldap handle. The AoH for records, and methods for read and write are required. This sits well with the dispatcher class. Retain handle passing UI facility if possible, but deprecate. | ||||||
1797 | |||||||
1798 | Implementing Data::Sync::Datasource as a dispatcher class redirecting to ::dbi & ::ldap would fit the UML model better, might make subclassing easier, and would probably be easier to maintain. For other divisions see Classes.dia | ||||||
1799 | |||||||
1800 | Validation can be extended for deletes by putting a % comparison of recordset vs delete set in 'run' | ||||||
1801 | |||||||
1802 | Multiple sources in a single job? Possibly by naming, and with a default name to support existing syntax. | ||||||
1803 | |||||||
1804 | Separate out the table creation methods into a subclass(?) and use a dispatcher? (table creation WIP) | ||||||
1805 | |||||||
1806 | Friendly CSV/TD source/target methods | ||||||
1807 | |||||||
1808 | Modular datasource/targets for including non dbi/ldap datasources? | ||||||
1809 | |||||||
1810 | Example using AnyData & XML | ||||||
1811 | |||||||
1812 | Multiple targets in a single job? | ||||||
1813 | |||||||
1814 | Caching? | ||||||
1815 | |||||||
1816 | UTF8/ANSI handling. | ||||||
1817 | |||||||
1818 | Perltidy the tests (thanks for spotting the mess Gavin) | ||||||
1819 | |||||||
1820 | =head1 CHANGES | ||||||
1821 | |||||||
1822 | v0.08 | ||||||
1823 | |||||||
1824 | Bugfix, added package statement to Advanced.pm to ensure correct CPAN indexing | ||||||
1825 | |||||||
1826 | v0.07 | ||||||
1827 | |||||||
1828 | Added in 'add if no index' facility on writedbi | ||||||
1829 | |||||||
1830 | Generalised delta code | ||||||
1831 | |||||||
1832 | Removed 'use DDS' and replaced with 'require' in save and load methods, so dependency is only if using those functions | ||||||
1833 | |||||||
1834 | Implemented ::deletes | ||||||
1835 | |||||||
1836 | Generalised sourceToAoH & write methods | ||||||
1837 | |||||||
1838 | Separated complex uses into Data::Sync::Advanced POD | ||||||
1839 | |||||||
1840 | Implemented get & put methods | ||||||
1841 | |||||||
1842 | Implemented hashrecord method | ||||||
1843 | |||||||
1844 | extended testing | ||||||
1845 | |||||||
1846 | v0.06 | ||||||
1847 | |||||||
1848 | Implemented a commit method for the writehandle. | ||||||
1849 | |||||||
1850 | Implemented validate function | ||||||
1851 | |||||||
1852 | v0.05 | ||||||
1853 | |||||||
1854 | Fixed some 0E0 problems with return values from DBI. | ||||||
1855 | |||||||
1856 | Added postgresql caveat. | ||||||
1857 | |||||||
1858 | Extended developer test suite to include MySQL & Postgresql | ||||||
1859 | |||||||
1860 | v0.04 | ||||||
1861 | |||||||
1862 | Implemented basic attribute hashing | ||||||
1863 | |||||||
1864 | Added concatenate function for multivalued ldap attributes | ||||||
1865 | |||||||
1866 | v0.03 | ||||||
1867 | |||||||
1868 | Added uppercase and lowercase transformations | ||||||
1869 | |||||||
1870 | Moved read and write subs out of anonymous blocks | ||||||
1871 | |||||||
1872 | hid raw regex in #!# | ||||||
1873 | |||||||
1874 | implemented batch updating | ||||||
1875 | |||||||
1876 | V0.02 | ||||||
1877 | |||||||
1878 | Implemented load & save functions. | ||||||
1879 | |||||||
1880 | Implemented error function | ||||||
1881 | |||||||
1882 | Modified stripnewlines to replace with whitespace. | ||||||
1883 | |||||||
1884 | =head1 COPYRIGHT | ||||||
1885 | |||||||
1886 | Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself. | ||||||
1887 | |||||||
1888 | =head1 AUTHOR | ||||||
1889 | |||||||
1890 | Charles Colbourn | ||||||
1891 | |||||||
1892 | charlesc@g0n.net | ||||||
1893 | |||||||
1894 | =cut |