package Git::SVN::Memoize::YAML;
use warnings;
use strict;
use YAML::Any ();

# based on Memoize::Storable.

# Load the cache from the YAML file if it already exists; otherwise
# start with an empty hash.
sub TIEHASH {
	my $package = shift;
	my $filename = shift;
	my $truehash = (-e $filename) ? YAML::Any::LoadFile($filename) : {};
	my $self = {FILENAME => $filename, H => $truehash};
	bless $self => $package;
}

sub STORE {
	my $self = shift;
	$self->{H}{$_[0]} = $_[1];
}

sub FETCH {
	my $self = shift;
	$self->{H}{$_[0]};
}

sub EXISTS {
	my $self = shift;
	exists $self->{H}{$_[0]};
}

# Write the in-memory hash back to the YAML file when the tie is released.
sub DESTROY {
	my $self = shift;
	YAML::Any::DumpFile($self->{FILENAME}, $self->{H});
}

sub SCALAR {
	my $self = shift;
	scalar(%{$self->{H}});
}

# FIRSTKEY and NEXTKEY exist only to satisfy the tie interface; the
# cache hash is not meant to be iterated over.
sub FIRSTKEY {
	'Fake hash from Git::SVN::Memoize::YAML';
}

sub NEXTKEY {
	undef;
}

1;
__END__

=head1 NAME

Git::SVN::Memoize::YAML - store Memoized data in YAML format

=head1 SYNOPSIS

    use Memoize;
    use Git::SVN::Memoize::YAML;

    tie my %cache => 'Git::SVN::Memoize::YAML', $filename;
    memoize('slow_function', SCALAR_CACHE => [HASH => \%cache]);
    slow_function(arguments);

=head1 DESCRIPTION

This module provides a class that can be used to tie a hash to a
YAML file.  The file is read when the hash is initialized and
rewritten when the hash is destroyed.

The intent is to allow L<Memoize> to back its cache with a file in
YAML format, just like L<Memoize::Storable> allows L<Memoize> to
back its cache with a file in Storable format.  Unlike the Storable
format, the YAML format is platform-independent and fairly stable.

Carps on error.
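
For illustration, here is a minimal sketch that uses the tie interface
directly, without L<Memoize>.  The file name is only an example; the
sketch relies on nothing beyond the behaviour described above (the
file is read by C<tie> and written back when the tied hash is
released):

    use Git::SVN::Memoize::YAML;

    my $file = '/tmp/example-cache.yaml';  # any writable path

    {
        tie my %cache => 'Git::SVN::Memoize::YAML', $file;
        $cache{answer} = 42;          # STORE into the in-memory hash
        print $cache{answer}, "\n";   # FETCH from the in-memory hash
    }   # %cache goes out of scope; DESTROY dumps the hash to $file

    tie my %reloaded => 'Git::SVN::Memoize::YAML', $file;
    print $reloaded{answer}, "\n";    # prints 42, read back from disk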

=head1 DIAGNOSTICS

See L<YAML::Any>.

=head1 DEPENDENCIES

L<YAML::Any> from CPAN.

=head1 INCOMPATIBILITIES

None reported.

=head1 BUGS

The entire cache is read into a Perl hash when the file is loaded and
kept in memory until it is written back, so this module does not scale
to very large caches.